diff --git a/README.md b/README.md
index 2151330..e5c97a7 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
# matteo-the-prestige
# simsim discord bot
-blaseball, blaseball, is back! in an unofficial capacity.
+blaseball, blaseball, is back! in an unofficial capacity. this is completely unaffiliated with the game band
custom players, custom teams, custom leagues (that last one is coming soon™) all in discord!
diff --git a/games.py b/games.py
index 0bf07ee..805e4c4 100644
--- a/games.py
+++ b/games.py
@@ -107,11 +107,11 @@ class team(object):
self.slogan = None
def add_lineup(self, new_player):
- if len(self.lineup) <= 12:
+ if len(self.lineup) < 20:
self.lineup.append(new_player)
return (True,)
else:
- return (False, "12 players in the lineup, maximum. We're being generous here.")
+ return (False, "20 players in the lineup, maximum. We're being really generous here.")
def set_pitcher(self, new_player):
self.pitcher = new_player
@@ -149,6 +149,7 @@ class game(object):
self.last_update = ({},0) #this is a ({outcome}, runs) tuple
self.owner = None
self.ready = False
+ self.victory_lap = False
if length is not None:
self.max_innings = length
else:
@@ -175,9 +176,12 @@ class game(object):
batter = self.get_batter()
if self.top_of_inning:
- defender = random.choice(self.teams["home"].lineup)
+ defender_list = self.teams["home"].lineup.copy()
else:
- defender = random.choice(self.teams["away"].lineup)
+ defender_list = self.teams["away"].lineup.copy()
+
+ defender_list.append(self.get_pitcher())
+ defender = random.choice(defender_list) #make pitchers field
outcome["batter"] = batter
outcome["defender"] = ""
@@ -530,6 +534,16 @@ class game(object):
"home_pitcher" : self.teams["home"].pitcher
}
+ def named_bases(self):
+ name_bases = {}
+ for base in range(1,4):
+ if self.bases[base] is not None:
+ name_bases[base] = self.bases[base].name
+ else:
+ name_bases[base] = None
+
+ return name_bases
+
def gamestate_update_full(self):
attempts = self.thievery_attempts()
diff --git a/main_controller.py b/main_controller.py
new file mode 100644
index 0000000..51612e2
--- /dev/null
+++ b/main_controller.py
@@ -0,0 +1,129 @@
+import asyncio, time, datetime, games, json, threading
+from flask import Flask, url_for, Response, render_template, request, jsonify
+from flask_socketio import SocketIO, emit
+
+app = Flask("the-prestige")
+app.config['SECRET_KEY'] = 'dev'
+socketio = SocketIO(app)
+
+@app.route('/')
+def index():
+ return render_template("index.html")
+
+@app.route("/gotoboop")
+def get_game_states():
+ return last_update
+
+@socketio.on("recieved")
+def handle_new_conn(data):
+ socketio.emit("states_update", last_update, room=request.sid)
+
+thread2 = threading.Thread(target=socketio.run,args=(app,))
+thread2.start()
+
+master_games_dic = {} #key timestamp : (game game, {} state)
+last_update = {}
+
+
+def update_loop():
+ while True:
+ states_to_send = {}
+ game_times = iter(master_games_dic.copy().keys())
+ for game_time in game_times:
+ this_game, state, discrim_string = master_games_dic[game_time]
+ test_string = this_game.gamestate_display_full()
+ print(discrim_string)
+ state["leagueoruser"] = discrim_string
+ state["display_inning"] = this_game.inning #games need to be initialized with the following keys in state:
+ state["outs"] = this_game.outs #away_name
+ state["pitcher"] = this_game.get_pitcher().name #home_name
+ state["batter"] = this_game.get_batter().name #max_innings
+ state["away_score"] = this_game.teams["away"].score #top_of_inning = True
+ state["home_score"] = this_game.teams["home"].score #update_pause = 0
+ #victory_lap = False
+ if test_string == "Game not started.": #weather_emoji
+ state["update_emoji"] = "🍿" #weather_text
+ state["update_text"] = "Play blall!" #they also need a timestamp
+ state["start_delay"] -= 1
+
+ state["display_top_of_inning"] = state["top_of_inning"]
+
+ if state["start_delay"] <= 0:
+ if this_game.top_of_inning != state["top_of_inning"]:
+ state["update_pause"] = 2
+ state["pitcher"] = "-"
+ state["batter"] = "-"
+ if not state["top_of_inning"]:
+ state["display_inning"] -= 1
+ state["display_top_of_inning"] = False
+
+ if state["update_pause"] == 1:
+ state["update_emoji"] = "🍿"
+ if this_game.over:
+ state["display_inning"] -= 1
+ state["display_top_of_inning"] = False
+ winning_team = this_game.teams['home'].name if this_game.teams['home'].score > this_game.teams['away'].score else this_game.teams['away'].name
+ if this_game.victory_lap and winning_team == this_game.teams['home'].name:
+ state["update_text"] = f"{winning_team} wins with a victory lap!"
+ elif winning_team == this_game.teams['home'].name:
+ state["update_text"] = f"{winning_team} wins, shaming {this_game.teams['away'].name}!"
+ else:
+ state["update_text"] = f"{winning_team} wins!"
+ state["pitcher"] = "-"
+ state["batter"] = "-"
+ elif this_game.top_of_inning:
+ state["update_text"] = f"Top of {this_game.inning}. {this_game.teams['away'].name} batting!"
+ else:
+ if this_game.inning >= this_game.max_innings:
+ if this_game.teams["home"].score > this_game.teams["away"].score:
+ this_game.victory_lap = True
+ state["update_text"] = f"Bottom of {this_game.inning}. {this_game.teams['home'].name} batting!"
+
+ elif state["update_pause"] != 1 and test_string != "Game not started.":
+ if "steals" in this_game.last_update[0].keys():
+ updatestring = ""
+ for attempt in this_game.last_update[0]["steals"]:
+ updatestring += attempt + "\n"
+
+ state["update_emoji"] = "💎"
+ state["update_text"] = updatestring
+
+ else:
+ updatestring = ""
+ punc = ""
+ if this_game.last_update[0]["defender"] != "":
+ punc = ". "
+
+ if "fc_out" in this_game.last_update[0].keys():
+ name, base_string = this_game.last_update[0]['fc_out']
+ updatestring = f"{this_game.last_update[0]['batter']} {this_game.last_update[0]['text'].value.format(name, base_string)} {this_game.last_update[0]['defender']}{punc}"
+ else:
+ updatestring = f"{this_game.last_update[0]['batter']} {this_game.last_update[0]['text'].value} {this_game.last_update[0]['defender']}{punc}"
+ if this_game.last_update[1] > 0:
+ updatestring += f"{this_game.last_update[1]} runs scored!"
+
+ state["update_emoji"] = "🏏"
+ state["update_text"] = updatestring
+
+ state["bases"] = this_game.named_bases()
+
+ state["top_of_inning"] = this_game.top_of_inning
+
+ states_to_send[game_time] = state
+
+ if state["update_pause"] <= 1 and state["start_delay"] < 0:
+ if this_game.over:
+ state["update_pause"] = 2
+ if state["end_delay"] < 0:
+ master_games_dic.pop(game_time)
+ state["end_delay"] -= 1
+ else:
+ this_game.gamestate_update_full()
+
+ state["update_pause"] -= 1
+
+ global last_update
+ last_update = states_to_send
+
+ socketio.emit("states_update", states_to_send)
+ time.sleep(6)
diff --git a/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/INSTALLER b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/LICENSE.rst b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/LICENSE.rst
new file mode 100644
index 0000000..9d227a0
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2010 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/METADATA b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/METADATA
new file mode 100644
index 0000000..db7fcd1
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/METADATA
@@ -0,0 +1,137 @@
+Metadata-Version: 2.1
+Name: Flask
+Version: 1.1.2
+Summary: A simple framework for building complex web applications.
+Home-page: https://palletsprojects.com/p/flask/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Documentation, https://flask.palletsprojects.com/
+Project-URL: Code, https://github.com/pallets/flask
+Project-URL: Issue tracker, https://github.com/pallets/flask/issues
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Framework :: Flask
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
+Requires-Dist: Werkzeug (>=0.15)
+Requires-Dist: Jinja2 (>=2.10.1)
+Requires-Dist: itsdangerous (>=0.24)
+Requires-Dist: click (>=5.1)
+Provides-Extra: dev
+Requires-Dist: pytest ; extra == 'dev'
+Requires-Dist: coverage ; extra == 'dev'
+Requires-Dist: tox ; extra == 'dev'
+Requires-Dist: sphinx ; extra == 'dev'
+Requires-Dist: pallets-sphinx-themes ; extra == 'dev'
+Requires-Dist: sphinxcontrib-log-cabinet ; extra == 'dev'
+Requires-Dist: sphinx-issues ; extra == 'dev'
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: pallets-sphinx-themes ; extra == 'docs'
+Requires-Dist: sphinxcontrib-log-cabinet ; extra == 'docs'
+Requires-Dist: sphinx-issues ; extra == 'docs'
+Provides-Extra: dotenv
+Requires-Dist: python-dotenv ; extra == 'dotenv'
+
+Flask
+=====
+
+Flask is a lightweight `WSGI`_ web application framework. It is designed
+to make getting started quick and easy, with the ability to scale up to
+complex applications. It began as a simple wrapper around `Werkzeug`_
+and `Jinja`_ and has become one of the most popular Python web
+application frameworks.
+
+Flask offers suggestions, but doesn't enforce any dependencies or
+project layout. It is up to the developer to choose the tools and
+libraries they want to use. There are many extensions provided by the
+community that make adding new functionality easy.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ pip install -U Flask
+
+
+A Simple Example
+----------------
+
+.. code-block:: python
+
+ from flask import Flask
+
+ app = Flask(__name__)
+
+ @app.route("/")
+ def hello():
+ return "Hello, World!"
+
+.. code-block:: text
+
+ $ env FLASK_APP=hello.py flask run
+ * Serving Flask app "hello"
+ * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit)
+
+
+Contributing
+------------
+
+For guidance on setting up a development environment and how to make a
+contribution to Flask, see the `contributing guidelines`_.
+
+.. _contributing guidelines: https://github.com/pallets/flask/blob/master/CONTRIBUTING.rst
+
+
+Donate
+------
+
+The Pallets organization develops and supports Flask and the libraries
+it uses. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://psfmember.org/civicrm/contribute/transact?reset=1&id=20
+
+
+Links
+-----
+
+* Website: https://palletsprojects.com/p/flask/
+* Documentation: https://flask.palletsprojects.com/
+* Releases: https://pypi.org/project/Flask/
+* Code: https://github.com/pallets/flask
+* Issue tracker: https://github.com/pallets/flask/issues
+* Test status: https://dev.azure.com/pallets/flask/_build
+* Official chat: https://discord.gg/t6rrQZH
+
+.. _WSGI: https://wsgi.readthedocs.io
+.. _Werkzeug: https://www.palletsprojects.com/p/werkzeug/
+.. _Jinja: https://www.palletsprojects.com/p/jinja/
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
diff --git a/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/RECORD b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/RECORD
new file mode 100644
index 0000000..8fa81f3
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/RECORD
@@ -0,0 +1,49 @@
+../../Scripts/flask.exe,sha256=KiH4h59NIDb6Al7dChbDgRnZM3G0HVlQ8Og8S6W1VGE,97149
+Flask-1.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Flask-1.1.2.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
+Flask-1.1.2.dist-info/METADATA,sha256=3INpPWH6nKfZ33R2N-bQZy4TOe1wQCMweZc9mwcNrtc,4591
+Flask-1.1.2.dist-info/RECORD,,
+Flask-1.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+Flask-1.1.2.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
+Flask-1.1.2.dist-info/entry_points.txt,sha256=gBLA1aKg0OYR8AhbAfg8lnburHtKcgJLDU52BBctN0k,42
+Flask-1.1.2.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6
+flask/__init__.py,sha256=YnA9wkwbJcnb_jTT-nMsMFeFE_UWt33khKzdHmMSuyI,1894
+flask/__main__.py,sha256=fjVtt3QTANXlpJCOv3Ha7d5H-76MwzSIOab7SFD9TEk,254
+flask/__pycache__/__init__.cpython-38.pyc,,
+flask/__pycache__/__main__.cpython-38.pyc,,
+flask/__pycache__/_compat.cpython-38.pyc,,
+flask/__pycache__/app.cpython-38.pyc,,
+flask/__pycache__/blueprints.cpython-38.pyc,,
+flask/__pycache__/cli.cpython-38.pyc,,
+flask/__pycache__/config.cpython-38.pyc,,
+flask/__pycache__/ctx.cpython-38.pyc,,
+flask/__pycache__/debughelpers.cpython-38.pyc,,
+flask/__pycache__/globals.cpython-38.pyc,,
+flask/__pycache__/helpers.cpython-38.pyc,,
+flask/__pycache__/logging.cpython-38.pyc,,
+flask/__pycache__/sessions.cpython-38.pyc,,
+flask/__pycache__/signals.cpython-38.pyc,,
+flask/__pycache__/templating.cpython-38.pyc,,
+flask/__pycache__/testing.cpython-38.pyc,,
+flask/__pycache__/views.cpython-38.pyc,,
+flask/__pycache__/wrappers.cpython-38.pyc,,
+flask/_compat.py,sha256=8KPT54Iig96TuLipdogLRHNYToIcg-xPhnSV5VRERnw,4099
+flask/app.py,sha256=tmEhx_XrIRP24vZg39dHMWFzJ2jj-YxIcd51LaIT5cE,98059
+flask/blueprints.py,sha256=vkdm8NusGsfZUeIfPdCluj733QFmiQcT4Sk1tuZLUjw,21400
+flask/cli.py,sha256=SIb22uq9wYBeB2tKMl0pYdhtZ1MAQyZtPL-3m6es4G0,31035
+flask/config.py,sha256=3dejvQRYfNHw_V7dCLMxU8UNFpL34xIKemN7gHZIZ8Y,10052
+flask/ctx.py,sha256=cks-omGedkxawHFo6bKIrdOHsJCAgg1i_NWw_htxb5U,16724
+flask/debughelpers.py,sha256=-whvPKuAoU8AZ9c1z_INuOeBgfYDqE1J2xNBsoriugU,6475
+flask/globals.py,sha256=OgcHb6_NCyX6-TldciOdKcyj4PNfyQwClxdMhvov6aA,1637
+flask/helpers.py,sha256=IHa578HU_3XAAo1wpXQv24MYRYO5TzaiDQQwvUIcE6Q,43074
+flask/json/__init__.py,sha256=6nITbZYiYOPB8Qfi1-dvsblwn01KRz8VOsMBIZyaYek,11988
+flask/json/__pycache__/__init__.cpython-38.pyc,,
+flask/json/__pycache__/tag.cpython-38.pyc,,
+flask/json/tag.py,sha256=vq9GOllg_0kTWKuVFrwmkeOQzR-jdBD23x-89JyCCQI,8306
+flask/logging.py,sha256=WcY5UkqTysGfmosyygSlXyZYGwOp3y-VsE6ehoJ48dk,3250
+flask/sessions.py,sha256=G0KsEkr_i1LG_wOINwFSOW3ts7Xbv4bNgEZKc7TRloc,14360
+flask/signals.py,sha256=yYLOed2x8WnQ7pirGalQYfpYpCILJ0LJhmNSrnWvjqw,2212
+flask/templating.py,sha256=F8E_IZXn9BGsjMzUJ5N_ACMyZdiFBp_SSEaUunvfZ7g,4939
+flask/testing.py,sha256=WXsciCQbHBP7xjHqNvOA4bT0k86GvSNpgzncfXLDEEg,10146
+flask/views.py,sha256=eeWnadLAj0QdQPLtjKipDetRZyG62CT2y7fNOFDJz0g,5802
+flask/wrappers.py,sha256=kgsvtZuMM6RQaDqhRbc5Pcj9vqTnaERl2pmXcdGL7LU,4736
diff --git a/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/REQUESTED b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/WHEEL b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/WHEEL
new file mode 100644
index 0000000..8b701e9
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/entry_points.txt b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/entry_points.txt
new file mode 100644
index 0000000..1eb0252
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[console_scripts]
+flask = flask.cli:main
+
diff --git a/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/top_level.txt b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/top_level.txt
new file mode 100644
index 0000000..7e10602
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask-1.1.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+flask
diff --git a/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/INSTALLER b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/LICENSE b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/LICENSE
new file mode 100644
index 0000000..f5c10ab
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Miguel Grinberg
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/METADATA b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/METADATA
new file mode 100644
index 0000000..7b2e141
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/METADATA
@@ -0,0 +1,27 @@
+Metadata-Version: 2.1
+Name: Flask-SocketIO
+Version: 5.0.1
+Summary: Socket.IO integration for Flask applications
+Home-page: http://github.com/miguelgrinberg/Flask-SocketIO/
+Author: Miguel Grinberg
+Author-email: miguelgrinberg50@gmail.com
+License: MIT
+Platform: any
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Dist: Flask (>=0.9)
+Requires-Dist: python-socketio (>=5.0.2)
+
+
+Flask-SocketIO
+--------------
+
+Socket.IO integration for Flask applications.
+
+
diff --git a/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/RECORD b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/RECORD
new file mode 100644
index 0000000..a75fbfa
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/RECORD
@@ -0,0 +1,13 @@
+Flask_SocketIO-5.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Flask_SocketIO-5.0.1.dist-info/LICENSE,sha256=aNCWbkgKjS_T1cJtACyZbvCM36KxWnfQ0LWTuavuYKQ,1082
+Flask_SocketIO-5.0.1.dist-info/METADATA,sha256=ZGkOXFiwpUToh06H483U7XKXKi7tybYhE9ajnguMpFE,825
+Flask_SocketIO-5.0.1.dist-info/RECORD,,
+Flask_SocketIO-5.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+Flask_SocketIO-5.0.1.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
+Flask_SocketIO-5.0.1.dist-info/top_level.txt,sha256=C1ugzQBJ3HHUJsWGzyt70XRVOX-y4CUAR8MWKjwJOQ8,15
+flask_socketio/__init__.py,sha256=hy8Sh1yu3zsZF3QUC0_U4VNsBumswPBAWoqNOlgA8wk,47464
+flask_socketio/__pycache__/__init__.cpython-38.pyc,,
+flask_socketio/__pycache__/namespace.cpython-38.pyc,,
+flask_socketio/__pycache__/test_client.cpython-38.pyc,,
+flask_socketio/namespace.py,sha256=mt8S3u-Iwn-wHaV0QwVMMItOQZtc-A2bxMnlwQHLuxw,2019
+flask_socketio/test_client.py,sha256=YW7mz4jO3XZAg1Xg_QDy4va233oyYO9Uo9JHJ0v2FCA,10269
diff --git a/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/REQUESTED b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/WHEEL b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/WHEEL
new file mode 100644
index 0000000..01b8fc7
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/top_level.txt b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/top_level.txt
new file mode 100644
index 0000000..ba82ec3
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+flask_socketio
diff --git a/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/INSTALLER b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/LICENSE.rst b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/LICENSE.rst
new file mode 100644
index 0000000..c37cae4
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2007 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/METADATA b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/METADATA
new file mode 100644
index 0000000..55c0f82
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/METADATA
@@ -0,0 +1,106 @@
+Metadata-Version: 2.1
+Name: Jinja2
+Version: 2.11.2
+Summary: A very fast and expressive template engine.
+Home-page: https://palletsprojects.com/p/jinja/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Documentation, https://jinja.palletsprojects.com/
+Project-URL: Code, https://github.com/pallets/jinja
+Project-URL: Issue tracker, https://github.com/pallets/jinja/issues
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
+Description-Content-Type: text/x-rst
+Requires-Dist: MarkupSafe (>=0.23)
+Provides-Extra: i18n
+Requires-Dist: Babel (>=0.8) ; extra == 'i18n'
+
+Jinja
+=====
+
+Jinja is a fast, expressive, extensible templating engine. Special
+placeholders in the template allow writing code similar to Python
+syntax. Then the template is passed data to render the final document.
+
+It includes:
+
+- Template inheritance and inclusion.
+- Define and import macros within templates.
+- HTML templates can use autoescaping to prevent XSS from untrusted
+ user input.
+- A sandboxed environment can safely render untrusted templates.
+- AsyncIO support for generating templates and calling async
+ functions.
+- I18N support with Babel.
+- Templates are compiled to optimized Python code just-in-time and
+ cached, or can be compiled ahead-of-time.
+- Exceptions point to the correct line in templates to make debugging
+ easier.
+- Extensible filters, tests, functions, and even syntax.
+
+Jinja's philosophy is that while application logic belongs in Python if
+possible, it shouldn't make the template designer's job difficult by
+restricting functionality too much.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ $ pip install -U Jinja2
+
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+In A Nutshell
+-------------
+
+.. code-block:: jinja
+
+ {% extends "base.html" %}
+ {% block title %}Members{% endblock %}
+ {% block content %}
+
+ {% endblock %}
+
+
+Links
+-----
+
+- Website: https://palletsprojects.com/p/jinja/
+- Documentation: https://jinja.palletsprojects.com/
+- Releases: https://pypi.org/project/Jinja2/
+- Code: https://github.com/pallets/jinja
+- Issue tracker: https://github.com/pallets/jinja/issues
+- Test status: https://dev.azure.com/pallets/jinja/_build
+- Official chat: https://discord.gg/t6rrQZH
+
+
diff --git a/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/RECORD b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/RECORD
new file mode 100644
index 0000000..d1a691f
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/RECORD
@@ -0,0 +1,61 @@
+Jinja2-2.11.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Jinja2-2.11.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
+Jinja2-2.11.2.dist-info/METADATA,sha256=5ZHRZoIRAMHsJPnqhlJ622_dRPsYePYJ-9EH4-Ry7yI,3535
+Jinja2-2.11.2.dist-info/RECORD,,
+Jinja2-2.11.2.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+Jinja2-2.11.2.dist-info/entry_points.txt,sha256=Qy_DkVo6Xj_zzOtmErrATe8lHZhOqdjpt3e4JJAGyi8,61
+Jinja2-2.11.2.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
+jinja2/__init__.py,sha256=0QCM_jKKDM10yzSdHRVV4mQbCbDqf0GN0GirAqibn9Y,1549
+jinja2/__pycache__/__init__.cpython-38.pyc,,
+jinja2/__pycache__/_compat.cpython-38.pyc,,
+jinja2/__pycache__/_identifier.cpython-38.pyc,,
+jinja2/__pycache__/asyncfilters.cpython-38.pyc,,
+jinja2/__pycache__/asyncsupport.cpython-38.pyc,,
+jinja2/__pycache__/bccache.cpython-38.pyc,,
+jinja2/__pycache__/compiler.cpython-38.pyc,,
+jinja2/__pycache__/constants.cpython-38.pyc,,
+jinja2/__pycache__/debug.cpython-38.pyc,,
+jinja2/__pycache__/defaults.cpython-38.pyc,,
+jinja2/__pycache__/environment.cpython-38.pyc,,
+jinja2/__pycache__/exceptions.cpython-38.pyc,,
+jinja2/__pycache__/ext.cpython-38.pyc,,
+jinja2/__pycache__/filters.cpython-38.pyc,,
+jinja2/__pycache__/idtracking.cpython-38.pyc,,
+jinja2/__pycache__/lexer.cpython-38.pyc,,
+jinja2/__pycache__/loaders.cpython-38.pyc,,
+jinja2/__pycache__/meta.cpython-38.pyc,,
+jinja2/__pycache__/nativetypes.cpython-38.pyc,,
+jinja2/__pycache__/nodes.cpython-38.pyc,,
+jinja2/__pycache__/optimizer.cpython-38.pyc,,
+jinja2/__pycache__/parser.cpython-38.pyc,,
+jinja2/__pycache__/runtime.cpython-38.pyc,,
+jinja2/__pycache__/sandbox.cpython-38.pyc,,
+jinja2/__pycache__/tests.cpython-38.pyc,,
+jinja2/__pycache__/utils.cpython-38.pyc,,
+jinja2/__pycache__/visitor.cpython-38.pyc,,
+jinja2/_compat.py,sha256=B6Se8HjnXVpzz9-vfHejn-DV2NjaVK-Iewupc5kKlu8,3191
+jinja2/_identifier.py,sha256=EdgGJKi7O1yvr4yFlvqPNEqV6M1qHyQr8Gt8GmVTKVM,1775
+jinja2/asyncfilters.py,sha256=XJtYXTxFvcJ5xwk6SaDL4S0oNnT0wPYvXBCSzc482fI,4250
+jinja2/asyncsupport.py,sha256=ZBFsDLuq3Gtji3Ia87lcyuDbqaHZJRdtShZcqwpFnSQ,7209
+jinja2/bccache.py,sha256=3Pmp4jo65M9FQuIxdxoDBbEDFwe4acDMQf77nEJfrHA,12139
+jinja2/compiler.py,sha256=Ta9W1Lit542wItAHXlDcg0sEOsFDMirCdlFPHAurg4o,66284
+jinja2/constants.py,sha256=RR1sTzNzUmKco6aZicw4JpQpJGCuPuqm1h1YmCNUEFY,1458
+jinja2/debug.py,sha256=neR7GIGGjZH3_ILJGVUYy3eLQCCaWJMXOb7o0kGInWc,8529
+jinja2/defaults.py,sha256=85B6YUUCyWPSdrSeVhcqFVuu_bHUAQXeey--FIwSeVQ,1126
+jinja2/environment.py,sha256=XDSLKc4SqNLMOwTSq3TbWEyA5WyXfuLuVD0wAVjEFwM,50629
+jinja2/exceptions.py,sha256=VjNLawcmf2ODffqVMCQK1cRmvFaUfQWF4u8ouP3QPcE,5425
+jinja2/ext.py,sha256=AtwL5O5enT_L3HR9-oBvhGyUTdGoyaqG_ICtnR_EVd4,26441
+jinja2/filters.py,sha256=_RpPgAlgIj7ExvyDzcHAC3B36cocfWK-1TEketbNeM0,41415
+jinja2/idtracking.py,sha256=J3O4VHsrbf3wzwiBc7Cro26kHb6_5kbULeIOzocchIU,9211
+jinja2/lexer.py,sha256=nUFLRKhhKmmEWkLI65nQePgcQs7qsRdjVYZETMt_v0g,30331
+jinja2/loaders.py,sha256=C-fST_dmFjgWkp0ZuCkrgICAoOsoSIF28wfAFink0oU,17666
+jinja2/meta.py,sha256=QjyYhfNRD3QCXjBJpiPl9KgkEkGXJbAkCUq4-Ur10EQ,4131
+jinja2/nativetypes.py,sha256=Ul__gtVw4xH-0qvUvnCNHedQeNDwmEuyLJztzzSPeRg,2753
+jinja2/nodes.py,sha256=Mk1oJPVgIjnQw9WOqILvcu3rLepcFZ0ahxQm2mbwDwc,31095
+jinja2/optimizer.py,sha256=gQLlMYzvQhluhzmAIFA1tXS0cwgWYOjprN-gTRcHVsc,1457
+jinja2/parser.py,sha256=fcfdqePNTNyvosIvczbytVA332qpsURvYnCGcjDHSkA,35660
+jinja2/runtime.py,sha256=0y-BRyIEZ9ltByL2Id6GpHe1oDRQAwNeQvI0SKobNMw,30618
+jinja2/sandbox.py,sha256=knayyUvXsZ-F0mk15mO2-ehK9gsw04UhB8td-iUOtLc,17127
+jinja2/tests.py,sha256=iO_Y-9Vo60zrVe1lMpSl5sKHqAxe2leZHC08OoZ8K24,4799
+jinja2/utils.py,sha256=OoVMlQe9S2-lWT6jJbTu9tDuDvGNyWUhHDcE51i5_Do,22522
+jinja2/visitor.py,sha256=DUHupl0a4PGp7nxRtZFttUzAi1ccxzqc2hzetPYUz8U,3240
diff --git a/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/WHEEL b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/WHEEL
new file mode 100644
index 0000000..ef99c6c
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/entry_points.txt b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/entry_points.txt
new file mode 100644
index 0000000..3619483
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[babel.extractors]
+jinja2 = jinja2.ext:babel_extract [i18n]
+
diff --git a/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/top_level.txt b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/top_level.txt
new file mode 100644
index 0000000..7f7afbf
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+jinja2
diff --git a/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/INSTALLER b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/LICENSE.rst b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/LICENSE.rst
new file mode 100644
index 0000000..9d227a0
--- /dev/null
+++ b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2010 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/METADATA b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/METADATA
new file mode 100644
index 0000000..c50370d
--- /dev/null
+++ b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/METADATA
@@ -0,0 +1,105 @@
+Metadata-Version: 2.1
+Name: MarkupSafe
+Version: 1.1.1
+Summary: Safely add untrusted strings to HTML/XML markup.
+Home-page: https://palletsprojects.com/p/markupsafe/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: The Pallets Team
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Documentation, https://markupsafe.palletsprojects.com/
+Project-URL: Code, https://github.com/pallets/markupsafe
+Project-URL: Issue tracker, https://github.com/pallets/markupsafe/issues
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
+Description-Content-Type: text/x-rst
+
+MarkupSafe
+==========
+
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ pip install -U MarkupSafe
+
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+Examples
+--------
+
+.. code-block:: pycon
+
+ >>> from markupsafe import Markup, escape
+ >>> # escape replaces special characters and wraps in Markup
+ >>> escape('')
+ Markup(u'<script>alert(document.cookie);</script>')
+ >>> # wrap in Markup to mark text "safe" and prevent escaping
+ >>> Markup('Hello')
+ Markup('hello')
+ >>> escape(Markup('Hello'))
+ Markup('hello')
+ >>> # Markup is a text subclass (str on Python 3, unicode on Python 2)
+ >>> # methods and operators escape their arguments
+ >>> template = Markup("Hello %s")
+ >>> template % '"World"'
+ Markup('Hello "World"')
+
+
+Donate
+------
+
+The Pallets organization develops and supports MarkupSafe and other
+libraries that use it. In order to grow the community of contributors
+and users, and allow the maintainers to devote more time to the
+projects, `please donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+* Website: https://palletsprojects.com/p/markupsafe/
+* Documentation: https://markupsafe.palletsprojects.com/
+* License: `BSD-3-Clause `_
+* Releases: https://pypi.org/project/MarkupSafe/
+* Code: https://github.com/pallets/markupsafe
+* Issue tracker: https://github.com/pallets/markupsafe/issues
+* Test status:
+
+ * Linux, Mac: https://travis-ci.org/pallets/markupsafe
+ * Windows: https://ci.appveyor.com/project/pallets/markupsafe
+
+* Test coverage: https://codecov.io/gh/pallets/markupsafe
+* Official chat: https://discord.gg/t6rrQZH
+
+
diff --git a/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/RECORD b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/RECORD
new file mode 100644
index 0000000..f728e1f
--- /dev/null
+++ b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/RECORD
@@ -0,0 +1,15 @@
+MarkupSafe-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+MarkupSafe-1.1.1.dist-info/LICENSE.rst,sha256=RjHsDbX9kKVH4zaBcmTGeYIUM4FG-KyUtKV_lu6MnsQ,1503
+MarkupSafe-1.1.1.dist-info/METADATA,sha256=IFCP4hCNGjXJgMoSvdjPiKDLAMUTTWoxKXQsQvmyMNU,3653
+MarkupSafe-1.1.1.dist-info/RECORD,,
+MarkupSafe-1.1.1.dist-info/WHEEL,sha256=jovIjvNuo6l5lHtTPdXyjKVQ_5SCkmdptE5fkPNfjyM,101
+MarkupSafe-1.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
+markupsafe/__init__.py,sha256=UAy1UKlykemnSZWIVn8RDqY0wvjV6lkeRwYOMNhw4bA,10453
+markupsafe/__pycache__/__init__.cpython-38.pyc,,
+markupsafe/__pycache__/_compat.cpython-38.pyc,,
+markupsafe/__pycache__/_constants.cpython-38.pyc,,
+markupsafe/__pycache__/_native.cpython-38.pyc,,
+markupsafe/_compat.py,sha256=XweNhJEcyTP_wIBUaIO6nxzIb6XFwweriXyZfiTpkdw,591
+markupsafe/_constants.py,sha256=IXLUQkLM6CTustG5vEQTEy6pBB3z5pm84NkYU1aW9qI,4954
+markupsafe/_native.py,sha256=LwsYk-GHoPsPboRD_tNC6_jTmCj3MLtsnDFis7HjE50,1942
+markupsafe/_speedups.cp38-win32.pyd,sha256=8nGEdcR_DUewvF72FxW6d-aD5SYoLMe2prWwEEnH9ck,12800
diff --git a/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/WHEEL b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/WHEEL
new file mode 100644
index 0000000..117ab3f
--- /dev/null
+++ b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: false
+Tag: cp38-cp38-win32
+
diff --git a/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/top_level.txt b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000..75bf729
--- /dev/null
+++ b/matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+markupsafe
diff --git a/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/INSTALLER b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/LICENSE.rst b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/LICENSE.rst
new file mode 100644
index 0000000..c37cae4
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2007 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/METADATA b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/METADATA
new file mode 100644
index 0000000..eb5f709
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/METADATA
@@ -0,0 +1,128 @@
+Metadata-Version: 2.1
+Name: Werkzeug
+Version: 1.0.1
+Summary: The comprehensive WSGI web application library.
+Home-page: https://palletsprojects.com/p/werkzeug/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Documentation, https://werkzeug.palletsprojects.com/
+Project-URL: Code, https://github.com/pallets/werkzeug
+Project-URL: Issue tracker, https://github.com/pallets/werkzeug/issues
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
+Description-Content-Type: text/x-rst
+Provides-Extra: dev
+Requires-Dist: pytest ; extra == 'dev'
+Requires-Dist: pytest-timeout ; extra == 'dev'
+Requires-Dist: coverage ; extra == 'dev'
+Requires-Dist: tox ; extra == 'dev'
+Requires-Dist: sphinx ; extra == 'dev'
+Requires-Dist: pallets-sphinx-themes ; extra == 'dev'
+Requires-Dist: sphinx-issues ; extra == 'dev'
+Provides-Extra: watchdog
+Requires-Dist: watchdog ; extra == 'watchdog'
+
+Werkzeug
+========
+
+*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff")
+
+Werkzeug is a comprehensive `WSGI`_ web application library. It began as
+a simple collection of various utilities for WSGI applications and has
+become one of the most advanced WSGI utility libraries.
+
+It includes:
+
+- An interactive debugger that allows inspecting stack traces and
+ source code in the browser with an interactive interpreter for any
+ frame in the stack.
+- A full-featured request object with objects to interact with
+ headers, query args, form data, files, and cookies.
+- A response object that can wrap other WSGI applications and handle
+ streaming data.
+- A routing system for matching URLs to endpoints and generating URLs
+ for endpoints, with an extensible system for capturing variables
+ from URLs.
+- HTTP utilities to handle entity tags, cache control, dates, user
+ agents, cookies, files, and more.
+- A threaded WSGI server for use while developing applications
+ locally.
+- A test client for simulating HTTP requests during testing without
+ requiring running a server.
+
+Werkzeug is Unicode aware and doesn't enforce any dependencies. It is up
+to the developer to choose a template engine, database adapter, and even
+how to handle requests. It can be used to build all sorts of end user
+applications such as blogs, wikis, or bulletin boards.
+
+`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while
+providing more structure and patterns for defining powerful
+applications.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ pip install -U Werkzeug
+
+
+A Simple Example
+----------------
+
+.. code-block:: python
+
+ from werkzeug.wrappers import Request, Response
+
+ @Request.application
+ def application(request):
+ return Response('Hello, World!')
+
+ if __name__ == '__main__':
+ from werkzeug.serving import run_simple
+ run_simple('localhost', 4000, application)
+
+
+Links
+-----
+
+- Website: https://palletsprojects.com/p/werkzeug/
+- Documentation: https://werkzeug.palletsprojects.com/
+- Releases: https://pypi.org/project/Werkzeug/
+- Code: https://github.com/pallets/werkzeug
+- Issue tracker: https://github.com/pallets/werkzeug/issues
+- Test status: https://dev.azure.com/pallets/werkzeug/_build
+- Official chat: https://discord.gg/t6rrQZH
+
+.. _WSGI: https://wsgi.readthedocs.io/en/latest/
+.. _Flask: https://www.palletsprojects.com/p/flask/
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
diff --git a/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/RECORD b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/RECORD
new file mode 100644
index 0000000..9bc5975
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/RECORD
@@ -0,0 +1,101 @@
+Werkzeug-1.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Werkzeug-1.0.1.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
+Werkzeug-1.0.1.dist-info/METADATA,sha256=d0zmVNa4UC2-nAo2A8_81oiy123D6JTGRSuY_Ymgyt4,4730
+Werkzeug-1.0.1.dist-info/RECORD,,
+Werkzeug-1.0.1.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+Werkzeug-1.0.1.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9
+werkzeug/__init__.py,sha256=rb-yPiXOjTLbtDOl5fQp5hN7oBdaoXAoQ-slAAvfZAo,502
+werkzeug/__pycache__/__init__.cpython-38.pyc,,
+werkzeug/__pycache__/_compat.cpython-38.pyc,,
+werkzeug/__pycache__/_internal.cpython-38.pyc,,
+werkzeug/__pycache__/_reloader.cpython-38.pyc,,
+werkzeug/__pycache__/datastructures.cpython-38.pyc,,
+werkzeug/__pycache__/exceptions.cpython-38.pyc,,
+werkzeug/__pycache__/filesystem.cpython-38.pyc,,
+werkzeug/__pycache__/formparser.cpython-38.pyc,,
+werkzeug/__pycache__/http.cpython-38.pyc,,
+werkzeug/__pycache__/local.cpython-38.pyc,,
+werkzeug/__pycache__/posixemulation.cpython-38.pyc,,
+werkzeug/__pycache__/routing.cpython-38.pyc,,
+werkzeug/__pycache__/security.cpython-38.pyc,,
+werkzeug/__pycache__/serving.cpython-38.pyc,,
+werkzeug/__pycache__/test.cpython-38.pyc,,
+werkzeug/__pycache__/testapp.cpython-38.pyc,,
+werkzeug/__pycache__/urls.cpython-38.pyc,,
+werkzeug/__pycache__/useragents.cpython-38.pyc,,
+werkzeug/__pycache__/utils.cpython-38.pyc,,
+werkzeug/__pycache__/wsgi.cpython-38.pyc,,
+werkzeug/_compat.py,sha256=zjufTNrhQ8BgYSGSh-sVu6iW3r3O9WzjE9j-qJobx-g,6671
+werkzeug/_internal.py,sha256=d_4AqheyS6dHMViwdc0drFrjs67ZzT6Ej2gWf-Z-Iys,14351
+werkzeug/_reloader.py,sha256=I3mg3oRQ0lLzl06oEoVopN3bN7CtINuuUQdqDcmTnEs,11531
+werkzeug/datastructures.py,sha256=AonxOcwU0TPMEzfKF1368ySULxHgxE-JE-DEAGdo2ts,100480
+werkzeug/debug/__init__.py,sha256=3RtUMc5Y9hYyK11ugHltgkQ9Dt-ViR945Vy_X5NV7zU,17289
+werkzeug/debug/__pycache__/__init__.cpython-38.pyc,,
+werkzeug/debug/__pycache__/console.cpython-38.pyc,,
+werkzeug/debug/__pycache__/repr.cpython-38.pyc,,
+werkzeug/debug/__pycache__/tbtools.cpython-38.pyc,,
+werkzeug/debug/console.py,sha256=OATaO7KHYMqpbzIFe1HeW9Mnl3wZgA3jMQoGDPn5URc,5488
+werkzeug/debug/repr.py,sha256=lIwuhbyrMwVe3P_cFqNyqzHL7P93TLKod7lw9clydEw,9621
+werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673
+werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507
+werkzeug/debug/shared/debugger.js,sha256=rOhqZMRfpZnnu6_XCGn6wMWPhtfwRAcyZKksdIxPJas,6400
+werkzeug/debug/shared/jquery.js,sha256=CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo,88145
+werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191
+werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200
+werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818
+werkzeug/debug/shared/style.css,sha256=gZ9uhmb5zj3XLuT9RvnMp6jMINgQ-VVBCp-2AZbG3YQ,6604
+werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220
+werkzeug/debug/tbtools.py,sha256=2iJ8RURUZUSbopOIehy53LnVJWx47lsHN2V2l6hc7Wc,20363
+werkzeug/exceptions.py,sha256=UTYSDkmAsH-vt8VSidlEffwqBVNXuT7bRg-_NqgUe8A,25188
+werkzeug/filesystem.py,sha256=HzKl-j0Hd8Jl66j778UbPTAYNnY6vUZgYLlBZ0e7uw0,2101
+werkzeug/formparser.py,sha256=Sto0jZid9im9ZVIf56vilCdyX-arK33wSftkYsLCnzo,21788
+werkzeug/http.py,sha256=KVRV3yFK14PJeI56qClEq4qxFdvKUQVy4C_dwuWz9_Q,43107
+werkzeug/local.py,sha256=_Tk7gB238pPWUU7habxFkZF02fiCMRVW6d62YWL1Rh0,14371
+werkzeug/middleware/__init__.py,sha256=f1SFZo67IlW4k1uqKzNHxYQlsakUS-D6KK_j0e3jjwQ,549
+werkzeug/middleware/__pycache__/__init__.cpython-38.pyc,,
+werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc,,
+werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc,,
+werkzeug/middleware/__pycache__/lint.cpython-38.pyc,,
+werkzeug/middleware/__pycache__/profiler.cpython-38.pyc,,
+werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc,,
+werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc,,
+werkzeug/middleware/dispatcher.py,sha256=_-KoMzHtcISHS7ouWKAOraqlCLprdh83YOAn_8DjLp8,2240
+werkzeug/middleware/http_proxy.py,sha256=lRjTdMmghHiZuZrS7_UJ3gZc-vlFizhBbFZ-XZPLwIA,7117
+werkzeug/middleware/lint.py,sha256=ItTwuWJnflF8xMT1uqU_Ty1ryhux-CjeUfskqaUpxsw,12967
+werkzeug/middleware/profiler.py,sha256=8B_s23d6BGrU_q54gJsm6kcCbOJbTSqrXCsioHON0Xs,4471
+werkzeug/middleware/proxy_fix.py,sha256=K5oZ3DPXOzdZi0Xba5zW7ClPOxgUuqXHQHvY2-AWCGw,6431
+werkzeug/middleware/shared_data.py,sha256=sPSRTKqtKSVBUyN8fr6jOJbdq9cdOLu6pg3gz4Y_1Xo,9599
+werkzeug/posixemulation.py,sha256=gSSiv1SCmOyzOM_nq1ZaZCtxP__C5MeDJl_4yXJmi4Q,3541
+werkzeug/routing.py,sha256=6-iZ7CKeUILYAehoKXLbmi5E6LgLbwuzUh8TNplnf5Q,79019
+werkzeug/security.py,sha256=81149MplFq7-hD4RK4sKp9kzXXejjV9D4lWBzaRyeQ8,8106
+werkzeug/serving.py,sha256=YvTqvurA-Mnj8mkqRe2kBdVr2ap4ibCq1ByQjOA6g1w,38694
+werkzeug/test.py,sha256=GJ9kxTMSJ-nB7kfGtxuROr9JGmXxDRev-2U1SkeUJGE,39564
+werkzeug/testapp.py,sha256=bHekqMsqRfVxwgFbvOMem-DYa_sdB7R47yUXpt1RUTo,9329
+werkzeug/urls.py,sha256=T8-hV_1vwhu6xhX93FwsHteK-W-kIE2orj5WoMf-WFw,39322
+werkzeug/useragents.py,sha256=TSoGv5IOvP375eK5gLLpsLQCeUgTR6sO1WftmAP_YvM,5563
+werkzeug/utils.py,sha256=hrVK4u_wi8z9viBO9bgOLlm1aaIvCpn-p2d1FeZQDEo,25251
+werkzeug/wrappers/__init__.py,sha256=S4VioKAmF_av9Ec9zQvG71X1EOkYfPx1TYck9jyDiyY,1384
+werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/accept.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/auth.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/base_request.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/base_response.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/common_descriptors.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/cors.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/etag.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/json.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/request.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/response.cpython-38.pyc,,
+werkzeug/wrappers/__pycache__/user_agent.cpython-38.pyc,,
+werkzeug/wrappers/accept.py,sha256=TIvjUc0g73fhTWX54wg_D9NNzKvpnG1X8u1w26tK1o8,1760
+werkzeug/wrappers/auth.py,sha256=Pmn6iaGHBrUyHbJpW0lZhO_q9RVoAa5QalaTqcavdAI,1158
+werkzeug/wrappers/base_request.py,sha256=4TuGlKWeKQdlq4eU94hJYcXSfWo8Rk7CS1Ef5lJ3ZM0,26012
+werkzeug/wrappers/base_response.py,sha256=JTxJZ8o-IBetpoWJqt2HFwPaNWNDAlM3_GXJe1Whw80,27784
+werkzeug/wrappers/common_descriptors.py,sha256=X2Ktd5zUWsmcd4ciaF62Dd8Lru9pLGP_XDUNukc8cXs,12829
+werkzeug/wrappers/cors.py,sha256=XMbaCol4dWTGvb-dCJBoN0p3JX91v93AIAHd7tnB3L4,3466
+werkzeug/wrappers/etag.py,sha256=XMXtyfByBsOjxwaX8U7ZtUY7JXkbQLP45oXZ0qkyTNs,12217
+werkzeug/wrappers/json.py,sha256=HvK_A4NpO0sLqgb10sTJcoZydYOwyNiPCJPV7SVgcgE,4343
+werkzeug/wrappers/request.py,sha256=QbHGqDpGPN684pnOPEokwkPESfm-NnfYM7ydOMxW_NI,1514
+werkzeug/wrappers/response.py,sha256=Oqv8TMG_dnOKTq_V30ddgkO5B7IJhkVPODvm7cbhZ3c,2524
+werkzeug/wrappers/user_agent.py,sha256=YJb-vr12cujG7sQMG9V89VsJa-03SWSenhg1W4cT0EY,435
+werkzeug/wsgi.py,sha256=ZGk85NzRyQTzkYis-xl8V9ydJgfClBdStvhzDzER2mw,34367
diff --git a/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/WHEEL b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/WHEEL
new file mode 100644
index 0000000..ef99c6c
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/top_level.txt b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/top_level.txt
new file mode 100644
index 0000000..6fe8da8
--- /dev/null
+++ b/matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+werkzeug
diff --git a/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/INSTALLER b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/LICENSE b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/LICENSE
new file mode 100644
index 0000000..a612ad9
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/LICENSE
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
diff --git a/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/METADATA b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/METADATA
new file mode 100644
index 0000000..4fc45ea
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/METADATA
@@ -0,0 +1,334 @@
+Metadata-Version: 2.1
+Name: bidict
+Version: 0.21.2
+Summary: The bidirectional mapping library for Python.
+Home-page: https://bidict.readthedocs.io
+Author: Joshua Bronson
+Author-email: jabronson@gmail.com
+License: MPL 2.0
+Keywords: dict dictionary mapping datastructure bimap bijection bijective injective inverse reverse bidirectional two-way 2-way
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+Provides-Extra: coverage
+Requires-Dist: coverage (<6) ; extra == 'coverage'
+Requires-Dist: pytest-cov (<3) ; extra == 'coverage'
+Provides-Extra: dev
+Requires-Dist: setuptools-scm ; extra == 'dev'
+Requires-Dist: hypothesis (<6) ; extra == 'dev'
+Requires-Dist: py (<2) ; extra == 'dev'
+Requires-Dist: pytest (<7) ; extra == 'dev'
+Requires-Dist: pytest-benchmark (<4,>=3.2.0) ; extra == 'dev'
+Requires-Dist: sortedcollections (<2) ; extra == 'dev'
+Requires-Dist: sortedcontainers (<3) ; extra == 'dev'
+Requires-Dist: Sphinx (<4) ; extra == 'dev'
+Requires-Dist: sphinx-autodoc-typehints (<2) ; extra == 'dev'
+Requires-Dist: coverage (<6) ; extra == 'dev'
+Requires-Dist: pytest-cov (<3) ; extra == 'dev'
+Requires-Dist: pre-commit (<3) ; extra == 'dev'
+Requires-Dist: tox (<4) ; extra == 'dev'
+Provides-Extra: docs
+Requires-Dist: Sphinx (<4) ; extra == 'docs'
+Requires-Dist: sphinx-autodoc-typehints (<2) ; extra == 'docs'
+Provides-Extra: precommit
+Requires-Dist: pre-commit (<3) ; extra == 'precommit'
+Provides-Extra: test
+Requires-Dist: hypothesis (<6) ; extra == 'test'
+Requires-Dist: py (<2) ; extra == 'test'
+Requires-Dist: pytest (<7) ; extra == 'test'
+Requires-Dist: pytest-benchmark (<4,>=3.2.0) ; extra == 'test'
+Requires-Dist: sortedcollections (<2) ; extra == 'test'
+Requires-Dist: sortedcontainers (<3) ; extra == 'test'
+Requires-Dist: Sphinx (<4) ; extra == 'test'
+Requires-Dist: sphinx-autodoc-typehints (<2) ; extra == 'test'
+
+.. Forward declarations for all the custom interpreted text roles that
+ Sphinx defines and that are used below. This helps Sphinx-unaware tools
+ (e.g. rst2html, PyPI's and GitHub's renderers, etc.).
+.. role:: doc
+
+.. Use :doc: rather than :ref: references below for better interop as well.
+
+
+``bidict``
+==========
+
+The bidirectional mapping library for Python.
+
+.. image:: https://raw.githubusercontent.com/jab/bidict/master/assets/logo-sm.png
+ :target: https://bidict.readthedocs.io/
+ :alt: bidict logo
+
+
+Status
+------
+
+.. image:: https://img.shields.io/pypi/v/bidict.svg
+ :target: https://pypi.org/project/bidict
+ :alt: Latest release
+
+.. image:: https://img.shields.io/readthedocs/bidict/master.svg
+ :target: https://bidict.readthedocs.io/en/master/
+ :alt: Documentation
+
+.. image:: https://api.travis-ci.org/jab/bidict.svg?branch=master
+ :target: https://travis-ci.org/jab/bidict
+ :alt: Travis-CI build status
+
+.. image:: https://codecov.io/gh/jab/bidict/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/jab/bidict
+ :alt: Test coverage
+
+.. Hide to reduce clutter
+ .. image:: https://img.shields.io/lgtm/alerts/github/jab/bidict.svg
+ :target: https://lgtm.com/projects/g/jab/bidict/
+ :alt: LGTM alerts
+ .. image:: https://bestpractices.coreinfrastructure.org/projects/2354/badge
+ :target: https://bestpractices.coreinfrastructure.org/en/projects/2354
+ :alt: CII best practices badge
+ .. image:: https://img.shields.io/badge/tidelift-pro%20support-orange.svg
+ :target: https://tidelift.com/subscription/pkg/pypi-bidict?utm_source=pypi-bidict&utm_medium=referral&utm_campaign=docs
+ :alt: Paid support available via Tidelift
+ .. image:: https://ci.appveyor.com/api/projects/status/gk133415udncwto3/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/jab/bidict
+ :alt: AppVeyor (Windows) build status
+ .. image:: https://img.shields.io/pypi/pyversions/bidict.svg
+ :target: https://pypi.org/project/bidict
+ :alt: Supported Python versions
+ .. image:: https://img.shields.io/pypi/implementation/bidict.svg
+ :target: https://pypi.org/project/bidict
+ :alt: Supported Python implementations
+
+.. image:: https://img.shields.io/pypi/l/bidict.svg
+ :target: https://raw.githubusercontent.com/jab/bidict/master/LICENSE
+ :alt: License
+
+.. image:: https://static.pepy.tech/badge/bidict
+ :target: https://pepy.tech/project/bidict
+ :alt: PyPI Downloads
+
+
+``bidict``:
+^^^^^^^^^^^
+
+- has been used for many years by several teams at
+ **Google, Venmo, CERN, Bank of America Merrill Lynch, Bloomberg, Two Sigma,** and many others
+- has carefully designed APIs for
+ **safety, simplicity, flexibility, and ergonomics**
+- is **fast, lightweight, and has no runtime dependencies** other than Python's standard library
+- **integrates natively** with Python’s ``collections.abc`` interfaces
+- provides **type hints** for all public APIs
+- is implemented in **concise, well-factored, pure (PyPy-compatible) Python code**
+ that is **optimized for running efficiently**
+ as well as for **reading and learning** [#fn-learning]_
+- has **extensive docs and test coverage**
+ (including property-based tests and benchmarks)
+ run continuously on all supported Python versions
+
+
+Note: Python 3 Required
+~~~~~~~~~~~~~~~~~~~~~~~
+
+As promised in the 0.18.2 release (see :doc:`changelog` [#fn-changelog]_),
+**Python 2 is no longer supported**.
+Version 0.18.3
+is the last release of ``bidict`` that supports Python 2.
+This makes ``bidict`` more efficient on Python 3
+and enables further improvement to bidict in the future.
+See `python3statement.org `__
+for more info.
+
+
+Installation
+------------
+
+``pip install bidict``
+
+
+Quick Start
+-----------
+
+.. code:: python
+
+ >>> from bidict import bidict
+ >>> element_by_symbol = bidict({'H': 'hydrogen'})
+ >>> element_by_symbol['H']
+ 'hydrogen'
+ >>> element_by_symbol.inverse['hydrogen']
+ 'H'
+
+
+For more usage documentation,
+head to the :doc:`intro` [#fn-intro]_
+and proceed from there.
+
+
+Community Support
+-----------------
+
+.. image:: https://img.shields.io/badge/chat-on%20gitter-5AB999.svg?logo=gitter-white
+ :target: https://gitter.im/jab/bidict
+ :alt: Chat
+
+If you are thinking of using ``bidict`` in your work,
+or if you have any questions, comments, or suggestions,
+I'd love to know about your use case
+and provide as much voluntary support for it as possible.
+
+Please feel free to leave a message in the
+`chatroom `__
+or open a new issue on GitHub.
+You can search through
+`existing issues `__
+before creating a new one
+in case your questions or concerns have been adressed there already.
+
+
+Enterprise-Grade Support via Tidelift
+-------------------------------------
+
+.. image:: https://img.shields.io/badge/tidelift-pro%20support-orange.svg
+ :target: https://tidelift.com/subscription/pkg/pypi-bidict?utm_source=pypi-bidict&utm_medium=referral&utm_campaign=readme
+ :alt: Paid support available via Tidelift
+
+If your use case requires a greater level of support,
+enterprise-grade support for ``bidict`` can be obtained via the
+`Tidelift subscription `__.
+
+
+Notice of Usage
+---------------
+
+If you use ``bidict``,
+and especially if your usage or your organization is significant in some way,
+please let me know.
+
+You can:
+
+- `star bidict on GitHub `__
+- `create an issue `__
+- leave a message in the `chat room `__
+- `email me `__
+
+
+Changelog
+---------
+
+See the :doc:`changelog` [#fn-changelog]_
+for a history of notable changes to ``bidict``.
+
+
+Release Notifications
+---------------------
+
+.. duplicated in CHANGELOG.rst:
+ (would use `.. include::` but GitHub doesn't understand it)
+
+.. image:: https://img.shields.io/badge/libraries.io-subscribe-5BC0DF.svg
+ :target: https://libraries.io/pypi/bidict
+ :alt: Follow on libraries.io
+
+Subscribe to releases
+`on GitHub `__ or
+`libraries.io `__
+to be notified when new versions of ``bidict`` are released.
+
+
+Learning from ``bidict``
+------------------------
+
+One of the best things about ``bidict``
+is that it touches a surprising number of
+interesting Python corners,
+especially given its small size and scope.
+
+Check out :doc:`learning-from-bidict` [#fn-learning]_
+if you're interested in learning more.
+
+
+Contributing
+------------
+
+``bidict`` is currently a one-person operation
+maintained on a voluntary basis.
+
+Your help would be most welcome!
+
+
+Reviewers Wanted!
+^^^^^^^^^^^^^^^^^
+
+One of the most valuable ways to contribute to ``bidict`` –
+and to explore some interesting Python corners [#fn-learning]_
+while you're at it –
+is to review the relatively small codebase.
+
+Please create an issue or pull request with any improvements you'd propose
+or any other results you found.
+Submitting a `draft PR `__
+with feedback in inline code comments, or a
+`"Review results" issue `__,
+would each work well.
+
+You can also
++1 `this issue `__
+to sign up to give feedback on future proposed changes
+that are in need of a reviewer.
+
+
+Giving Back
+^^^^^^^^^^^
+
+.. duplicated in CONTRIBUTING.rst
+ (would use `.. include::` but GitHub doesn't understand it)
+
+``bidict`` is the product of hundreds of hours of unpaid, voluntary work.
+
+If ``bidict`` has helped you accomplish your work,
+especially work you've been paid for,
+please consider chipping in toward the costs
+of its maintenance and development
+and/or ask your organization to do the same.
+
+.. image:: https://raw.githubusercontent.com/jab/bidict/master/assets/support-on-gumroad.png
+ :target: https://gumroad.com/l/bidict
+ :alt: Support bidict
+
+
+Finding Documentation
+---------------------
+
+If you're viewing this on ``__,
+note that multiple versions of the documentation are available,
+and you can choose a different version using the popup menu at the bottom-right.
+Please make sure you're viewing the version of the documentation
+that corresponds to the version of ``bidict`` you'd like to use.
+
+If you're viewing this on GitHub, PyPI, or some other place
+that can't render and link this documentation properly
+and are seeing broken links,
+try these alternate links instead:
+
+.. [#fn-learning] ``__ | ``__
+
+.. [#fn-changelog] ``__ | ``__
+
+.. [#fn-intro] ``__ | ``__
+
+
+----
+
+Next: :doc:`intro` [#fn-intro]_
+
+
diff --git a/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/RECORD b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/RECORD
new file mode 100644
index 0000000..cb526f6
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/RECORD
@@ -0,0 +1,41 @@
+bidict-0.21.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+bidict-0.21.2.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
+bidict-0.21.2.dist-info/METADATA,sha256=6p33oEnK6iIEBM4o7wQLGPUyeYHtc-yEW6_s05N3d5c,11630
+bidict-0.21.2.dist-info/RECORD,,
+bidict-0.21.2.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+bidict-0.21.2.dist-info/top_level.txt,sha256=WuQO02jp0ODioS7sJoaHg3JJ5_3h6Sxo9RITvNGPYmc,7
+bidict/__init__.py,sha256=A2ZUK4jTHNN6T3QUaSh7xuIwc-Ytgw6gVLHNx07D7Fo,3910
+bidict/__pycache__/__init__.cpython-38.pyc,,
+bidict/__pycache__/_abc.cpython-38.pyc,,
+bidict/__pycache__/_base.cpython-38.pyc,,
+bidict/__pycache__/_bidict.cpython-38.pyc,,
+bidict/__pycache__/_delegating.cpython-38.pyc,,
+bidict/__pycache__/_dup.cpython-38.pyc,,
+bidict/__pycache__/_exc.cpython-38.pyc,,
+bidict/__pycache__/_frozenbidict.cpython-38.pyc,,
+bidict/__pycache__/_frozenordered.cpython-38.pyc,,
+bidict/__pycache__/_iter.cpython-38.pyc,,
+bidict/__pycache__/_mut.cpython-38.pyc,,
+bidict/__pycache__/_named.cpython-38.pyc,,
+bidict/__pycache__/_orderedbase.cpython-38.pyc,,
+bidict/__pycache__/_orderedbidict.cpython-38.pyc,,
+bidict/__pycache__/_typing.cpython-38.pyc,,
+bidict/__pycache__/_version.cpython-38.pyc,,
+bidict/__pycache__/metadata.cpython-38.pyc,,
+bidict/_abc.py,sha256=irEWsolFCp8ps77OKmWwB0gTrpXc5be0RBdHaQoPybk,4626
+bidict/_base.py,sha256=k7oLFwb_6ZMHMhfI217hnM-WfJ4oxVMTol1BG14E3cA,16180
+bidict/_bidict.py,sha256=85G1TyWeMZLE70HK-qwCVug-bCdaI3bIeoBxJzwSkkQ,2005
+bidict/_delegating.py,sha256=UibZewwgmN8iBECZtjELwKl5zhcuxYnyy2gsiAXBe3c,1313
+bidict/_dup.py,sha256=j0DSseguIdCgAhqxm0Zn2887110zx70F19Lvw7hiayg,1819
+bidict/_exc.py,sha256=nKOGqxqOvyjheh-Pgo-dZZWRRvPEWYyD8Ukm5XR8WNk,1053
+bidict/_frozenbidict.py,sha256=IYMIzsm9pAXTS819Tw7z_VTLIEZir4oLJbrcRc5yFP8,2494
+bidict/_frozenordered.py,sha256=E4kzBIoriZLuth9I1ll57KelvUN_xDAvZjQH7GNdn30,3224
+bidict/_iter.py,sha256=F9zoHs-IrkucujbRGnMJslH_Gc_Qrla4Mk1sOvn7ELg,2333
+bidict/_mut.py,sha256=MBXzglmeNJniRbdZ1C0Tx14pcsaBdi1NPaaFGIzZEpg,7352
+bidict/_named.py,sha256=_WQjoz9pE1d_HwVQX05vn5TthOREOw49yDdFSs5lvU4,3784
+bidict/_orderedbase.py,sha256=yMIRfDtY5DQJoAeI5YvIW49O42MuKqK8qxDrczr1NQY,12196
+bidict/_orderedbidict.py,sha256=tkfAMxehLetMqTrGoQq9KfdOpgRdhzWqp2lmk6_4vL0,3409
+bidict/_typing.py,sha256=3lq-wZhWGyn3q7euw6YK7LwFnxOVB1qdqX1x1HcW4Ng,862
+bidict/_version.py,sha256=e4Wu3F4t-gj1TaiLYadYEQ_3R8pNGz4Xi1K4eN1WFIw,117
+bidict/metadata.py,sha256=htEXequ7kpMnWeRKrl4cUJZBQIbBegxgu_bxFZ0pIkY,1812
+bidict/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/WHEEL b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/WHEEL
new file mode 100644
index 0000000..6d38aa0
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/top_level.txt b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/top_level.txt
new file mode 100644
index 0000000..6ff5b04
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+bidict
diff --git a/matteo_env/Lib/site-packages/bidict/__init__.py b/matteo_env/Lib/site-packages/bidict/__init__.py
new file mode 100644
index 0000000..27af47d
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/__init__.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+#==============================================================================
+# * Welcome to the bidict source code *
+#==============================================================================
+
+# Doing a code review? You'll find a "Code review nav" comment like the one
+# below at the top and bottom of the most important source files. This provides
+# a suggested initial path through the source when reviewing.
+#
+# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
+# viewing an outdated version of the code. Please head to GitHub to review the
+# latest version, which contains important improvements over older versions.
+#
+# Thank you for reading and for any feedback you provide.
+
+# * Code review nav *
+#==============================================================================
+# Current: __init__.py Next: _abc.py →
+#==============================================================================
+
+
+"""The bidirectional mapping library for Python.
+
+bidict by example:
+
+.. code-block:: python
+
+ >>> from bidict import bidict
+ >>> element_by_symbol = bidict({'H': 'hydrogen'})
+ >>> element_by_symbol['H']
+ 'hydrogen'
+ >>> element_by_symbol.inverse['hydrogen']
+ 'H'
+
+
+Please see https://github.com/jab/bidict for the most up-to-date code and
+https://bidict.readthedocs.io for the most up-to-date documentation
+if you are reading this elsewhere.
+
+
+.. :copyright: (c) 2009-2020 Joshua Bronson.
+.. :license: MPLv2. See LICENSE for details.
+"""
+
+# Use private aliases to not re-export these publicly (for Sphinx automodule with imported-members).
+from sys import version_info as _version_info
+
+
+if _version_info < (3, 6): # pragma: no cover
+ raise ImportError('Python 3.6+ is required.')
+
+# The rest of this file only collects functionality implemented in the rest of the
+# source for the purposes of exporting it under the `bidict` module namespace.
+# flake8: noqa: F401 (imported but unused)
+from ._abc import BidirectionalMapping, MutableBidirectionalMapping
+from ._base import BidictBase
+from ._mut import MutableBidict
+from ._bidict import bidict
+from ._frozenbidict import frozenbidict
+from ._frozenordered import FrozenOrderedBidict
+from ._named import namedbidict
+from ._orderedbase import OrderedBidictBase
+from ._orderedbidict import OrderedBidict
+from ._dup import ON_DUP_DEFAULT, ON_DUP_RAISE, ON_DUP_DROP_OLD, RAISE, DROP_OLD, DROP_NEW, OnDup, OnDupAction
+from ._exc import BidictException, DuplicationError, KeyDuplicationError, ValueDuplicationError, KeyAndValueDuplicationError
+from ._iter import inverted
+from .metadata import (
+ __author__, __maintainer__, __copyright__, __email__, __credits__, __url__,
+ __license__, __status__, __description__, __keywords__, __version__, __version_info__,
+)
+
+# Set __module__ of re-exported classes to the 'bidict' top-level module name
+# so that private/internal submodules are not exposed to users e.g. in repr strings.
+_locals = tuple(locals().items())
+for _name, _obj in _locals: # pragma: no cover
+ if not getattr(_obj, '__module__', '').startswith('bidict.'):
+ continue
+ try:
+ _obj.__module__ = 'bidict'
+ except AttributeError as exc: # raised when __module__ is read-only (as in OnDup)
+ pass
+
+
+# * Code review nav *
+#==============================================================================
+# Current: __init__.py Next: _abc.py →
+#==============================================================================
diff --git a/matteo_env/Lib/site-packages/bidict/_abc.py b/matteo_env/Lib/site-packages/bidict/_abc.py
new file mode 100644
index 0000000..9a30679
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_abc.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+#==============================================================================
+# * Welcome to the bidict source code *
+#==============================================================================
+
+# Doing a code review? You'll find a "Code review nav" comment like the one
+# below at the top and bottom of the most important source files. This provides
+# a suggested initial path through the source when reviewing.
+#
+# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
+# viewing an outdated version of the code. Please head to GitHub to review the
+# latest version, which contains important improvements over older versions.
+#
+# Thank you for reading and for any feedback you provide.
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: __init__.py Current: _abc.py Next: _base.py →
+#==============================================================================
+
+
+"""Provide the :class:`BidirectionalMapping` abstract base class."""
+
+import typing as _t
+from abc import abstractmethod
+
+from ._typing import KT, VT
+
+
+class BidirectionalMapping(_t.Mapping[KT, VT]):
+ """Abstract base class (ABC) for bidirectional mapping types.
+
+ Extends :class:`collections.abc.Mapping` primarily by adding the
+ (abstract) :attr:`inverse` property,
+ which implementors of :class:`BidirectionalMapping`
+ should override to return a reference to the inverse
+ :class:`BidirectionalMapping` instance.
+ """
+
+ __slots__ = ()
+
+ @property
+ @abstractmethod
+ def inverse(self) -> 'BidirectionalMapping[VT, KT]':
+ """The inverse of this bidirectional mapping instance.
+
+ *See also* :attr:`bidict.BidictBase.inverse`, :attr:`bidict.BidictBase.inv`
+
+ :raises NotImplementedError: Meant to be overridden in subclasses.
+ """
+ # The @abstractproperty decorator prevents BidirectionalMapping subclasses from being
+ # instantiated unless they override this method. So users shouldn't be able to get to the
+ # point where they can unintentionally call this implementation of .inverse on something
+ # anyway. Could leave the method body empty, but raise NotImplementedError so it's extra
+ # clear there's no reason to call this implementation (e.g. via super() after overriding).
+ raise NotImplementedError
+
+ def __inverted__(self) -> _t.Iterator[_t.Tuple[VT, KT]]:
+ """Get an iterator over the items in :attr:`inverse`.
+
+ This is functionally equivalent to iterating over the items in the
+ forward mapping and inverting each one on the fly, but this provides a
+ more efficient implementation: Assuming the already-inverted items
+ are stored in :attr:`inverse`, just return an iterator over them directly.
+
+ Providing this default implementation enables external functions,
+ particularly :func:`~bidict.inverted`, to use this optimized
+ implementation when available, instead of having to invert on the fly.
+
+ *See also* :func:`bidict.inverted`
+ """
+ return iter(self.inverse.items())
+
+ def values(self) -> _t.AbstractSet[VT]: # type: ignore # https://github.com/python/typeshed/issues/4435
+ """A set-like object providing a view on the contained values.
+
+ Override the implementation inherited from
+ :class:`~collections.abc.Mapping`.
+ Because the values of a :class:`~bidict.BidirectionalMapping`
+ are the keys of its inverse,
+ this returns a :class:`~collections.abc.KeysView`
+ rather than a :class:`~collections.abc.ValuesView`,
+ which has the advantages of constant-time containment checks
+ and supporting set operations.
+ """
+ return self.inverse.keys()
+
+
+class MutableBidirectionalMapping(BidirectionalMapping[KT, VT], _t.MutableMapping[KT, VT]):
+ """Abstract base class (ABC) for mutable bidirectional mapping types."""
+
+ __slots__ = ()
+
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: __init__.py Current: _abc.py Next: _base.py →
+#==============================================================================
diff --git a/matteo_env/Lib/site-packages/bidict/_base.py b/matteo_env/Lib/site-packages/bidict/_base.py
new file mode 100644
index 0000000..f100805
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_base.py
@@ -0,0 +1,383 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+#==============================================================================
+# * Welcome to the bidict source code *
+#==============================================================================
+
+# Doing a code review? You'll find a "Code review nav" comment like the one
+# below at the top and bottom of the most important source files. This provides
+# a suggested initial path through the source when reviewing.
+#
+# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
+# viewing an outdated version of the code. Please head to GitHub to review the
+# latest version, which contains important improvements over older versions.
+#
+# Thank you for reading and for any feedback you provide.
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _abc.py Current: _base.py Next: _frozenbidict.py →
+#==============================================================================
+
+
+"""Provide :class:`BidictBase`."""
+
+import typing as _t
+from collections import namedtuple
+from copy import copy
+from weakref import ref
+
+from ._abc import BidirectionalMapping
+from ._dup import ON_DUP_DEFAULT, RAISE, DROP_OLD, DROP_NEW, OnDup
+from ._exc import DuplicationError, KeyDuplicationError, ValueDuplicationError, KeyAndValueDuplicationError
+from ._iter import _iteritems_args_kw
+from ._typing import _NONE, KT, VT, OKT, OVT, IterItems, MapOrIterItems
+
+
+_WriteResult = namedtuple('_WriteResult', 'key val oldkey oldval')
+_DedupResult = namedtuple('_DedupResult', 'isdupkey isdupval invbyval fwdbykey')
+_NODUP = _DedupResult(False, False, _NONE, _NONE)
+
+BT = _t.TypeVar('BT', bound='BidictBase') # typevar for BidictBase.copy
+
+
+class BidictBase(BidirectionalMapping[KT, VT]):
+ """Base class implementing :class:`BidirectionalMapping`."""
+
+ __slots__ = ['_fwdm', '_invm', '_inv', '_invweak', '_hash', '__weakref__']
+
+ #: The default :class:`~bidict.OnDup`
+ #: that governs behavior when a provided item
+ #: duplicates the key or value of other item(s).
+ #:
+ #: *See also* :ref:`basic-usage:Values Must Be Unique`, :doc:`extending`
+ on_dup = ON_DUP_DEFAULT
+
+ _fwdm_cls = dict #: class of the backing forward mapping
+ _invm_cls = dict #: class of the backing inverse mapping
+
+ #: The object used by :meth:`__repr__` for printing the contained items.
+ _repr_delegate = dict
+
+ def __init_subclass__(cls, **kw):
+ super().__init_subclass__(**kw)
+ # Compute and set _inv_cls, the inverse of this bidict class.
+ if '_inv_cls' in cls.__dict__:
+ return
+ if cls._fwdm_cls is cls._invm_cls:
+ cls._inv_cls = cls
+ return
+ inv_cls = type(cls.__name__ + 'Inv', cls.__bases__, {
+ **cls.__dict__,
+ '_inv_cls': cls,
+ '_fwdm_cls': cls._invm_cls,
+ '_invm_cls': cls._fwdm_cls,
+ })
+ cls._inv_cls = inv_cls
+
+ @_t.overload
+ def __init__(self, __arg: _t.Mapping[KT, VT], **kw: VT) -> None: ...
+ @_t.overload
+ def __init__(self, __arg: IterItems[KT, VT], **kw: VT) -> None: ...
+ @_t.overload
+ def __init__(self, **kw: VT) -> None: ...
+ def __init__(self, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
+ """Make a new bidirectional dictionary.
+ The signature behaves like that of :class:`dict`.
+ Items passed in are added in the order they are passed,
+ respecting the :attr:`on_dup` class attribute in the process.
+ """
+ #: The backing :class:`~collections.abc.Mapping`
+ #: storing the forward mapping data (*key* → *value*).
+ self._fwdm: _t.Dict[KT, VT] = self._fwdm_cls()
+ #: The backing :class:`~collections.abc.Mapping`
+ #: storing the inverse mapping data (*value* → *key*).
+ self._invm: _t.Dict[VT, KT] = self._invm_cls()
+ self._init_inv()
+ if args or kw:
+ self._update(True, self.on_dup, *args, **kw)
+
+ def _init_inv(self) -> None:
+ # Create the inverse bidict instance via __new__, bypassing its __init__ so that its
+ # _fwdm and _invm can be assigned to this bidict's _invm and _fwdm. Store it in self._inv,
+ # which holds a strong reference to a bidict's inverse, if one is available.
+ self._inv = inv = self._inv_cls.__new__(self._inv_cls) # type: ignore
+ inv._fwdm = self._invm
+ inv._invm = self._fwdm
+ # Only give the inverse a weak reference to this bidict to avoid creating a reference cycle,
+ # stored in the _invweak attribute. See also the docs in
+ # :ref:`addendum:Bidict Avoids Reference Cycles`
+ inv._inv = None
+ inv._invweak = ref(self)
+ # Since this bidict has a strong reference to its inverse already, set its _invweak to None.
+ self._invweak = None
+
+ @property
+ def _isinv(self) -> bool:
+ return self._inv is None
+
+ @property
+ def inverse(self) -> 'BidictBase[VT, KT]':
+ """The inverse of this bidict."""
+ # Resolve and return a strong reference to the inverse bidict.
+ # One may be stored in self._inv already.
+ if self._inv is not None:
+ return self._inv # type: ignore
+ # Otherwise a weakref is stored in self._invweak. Try to get a strong ref from it.
+ assert self._invweak is not None
+ inv = self._invweak()
+ if inv is not None:
+ return inv
+ # Refcount of referent must have dropped to zero, as in `bidict().inv.inv`. Init a new one.
+ self._init_inv() # Now this bidict will retain a strong ref to its inverse.
+ return self._inv
+
+ #: Alias for :attr:`inverse`.
+ inv = inverse
+
+ def __getstate__(self) -> dict:
+ """Needed to enable pickling due to use of :attr:`__slots__` and weakrefs.
+
+ *See also* :meth:`object.__getstate__`
+ """
+ state = {}
+ for cls in self.__class__.__mro__:
+ slots = getattr(cls, '__slots__', ())
+ for slot in slots:
+ if hasattr(self, slot):
+ state[slot] = getattr(self, slot)
+ # weakrefs can't be pickled.
+ state.pop('_invweak', None) # Added back in __setstate__ via _init_inv call.
+ state.pop('__weakref__', None) # Not added back in __setstate__. Python manages this one.
+ return state
+
+ def __setstate__(self, state: dict) -> None:
+ """Implemented because use of :attr:`__slots__` would prevent unpickling otherwise.
+
+ *See also* :meth:`object.__setstate__`
+ """
+ for slot, value in state.items():
+ setattr(self, slot, value)
+ self._init_inv()
+
+ def __repr__(self) -> str:
+ """See :func:`repr`."""
+ clsname = self.__class__.__name__
+ if not self:
+ return f'{clsname}()'
+ return f'{clsname}({self._repr_delegate(self.items())})'
+
+ # The inherited Mapping.__eq__ implementation would work, but it's implemented in terms of an
+ # inefficient ``dict(self.items()) == dict(other.items())`` comparison, so override it with a
+ # more efficient implementation.
+ def __eq__(self, other: object) -> bool:
+ """*x.__eq__(other) ⟺ x == other*
+
+ Equivalent to *dict(x.items()) == dict(other.items())*
+ but more efficient.
+
+ Note that :meth:`bidict's __eq__() <bidict.bidict.__eq__>` implementation
+ is inherited by subclasses,
+ in particular by the ordered bidict subclasses,
+ so even with ordered bidicts,
+ :ref:`== comparison is order-insensitive <eq-order-insensitive>`.
+
+ *See also* :meth:`bidict.FrozenOrderedBidict.equals_order_sensitive`
+ """
+ if not isinstance(other, _t.Mapping) or len(self) != len(other):
+ return False
+ selfget = self.get
+ return all(selfget(k, _NONE) == v for (k, v) in other.items()) # type: ignore
+
+ # The following methods are mutating and so are not public. But they are implemented in this
+ # non-mutable base class (rather than the mutable `bidict` subclass) because they are used here
+ # during initialization (starting with the `_update` method). (Why is this? Because `__init__`
+ # and `update` share a lot of the same behavior (inserting the provided items while respecting
+ # `on_dup`), so it makes sense for them to share implementation too.)
+ def _pop(self, key: KT) -> VT:
+ val = self._fwdm.pop(key)
+ del self._invm[val]
+ return val
+
+ def _put(self, key: KT, val: VT, on_dup: OnDup) -> None:
+ dedup_result = self._dedup_item(key, val, on_dup)
+ if dedup_result is not None:
+ self._write_item(key, val, dedup_result)
+
+ def _dedup_item(self, key: KT, val: VT, on_dup: OnDup) -> _t.Optional[_DedupResult]:
+ """Check *key* and *val* for any duplication in self.
+
+ Handle any duplication as per the passed in *on_dup*.
+
+ (key, val) already present is construed as a no-op, not a duplication.
+
+ If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
+ :attr:`~bidict.DROP_NEW`, return None.
+
+ If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
+ :attr:`~bidict.RAISE`, raise the appropriate error.
+
+ If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
+ :attr:`~bidict.DROP_OLD`,
+ or if no duplication is found,
+ return the :class:`_DedupResult` *(isdupkey, isdupval, oldkey, oldval)*.
+ """
+ fwdm = self._fwdm
+ invm = self._invm
+ oldval: OVT = fwdm.get(key, _NONE)
+ oldkey: OKT = invm.get(val, _NONE)
+ isdupkey = oldval is not _NONE
+ isdupval = oldkey is not _NONE
+ dedup_result = _DedupResult(isdupkey, isdupval, oldkey, oldval)
+ if isdupkey and isdupval:
+ if self._already_have(key, val, oldkey, oldval):
+ # (key, val) duplicates an existing item -> no-op.
+ return None
+ # key and val each duplicate a different existing item.
+ if on_dup.kv is RAISE:
+ raise KeyAndValueDuplicationError(key, val)
+ if on_dup.kv is DROP_NEW:
+ return None
+ assert on_dup.kv is DROP_OLD
+ # Fall through to the return statement on the last line.
+ elif isdupkey:
+ if on_dup.key is RAISE:
+ raise KeyDuplicationError(key)
+ if on_dup.key is DROP_NEW:
+ return None
+ assert on_dup.key is DROP_OLD
+ # Fall through to the return statement on the last line.
+ elif isdupval:
+ if on_dup.val is RAISE:
+ raise ValueDuplicationError(val)
+ if on_dup.val is DROP_NEW:
+ return None
+ assert on_dup.val is DROP_OLD
+ # Fall through to the return statement on the last line.
+ # else neither isdupkey nor isdupval.
+ return dedup_result
+
+ @staticmethod
+ def _already_have(key: KT, val: VT, oldkey: OKT, oldval: OVT) -> bool:
+ # Overridden by _orderedbase.OrderedBidictBase.
+ isdup = oldkey == key
+ assert isdup == (oldval == val), f'{key} {val} {oldkey} {oldval}'
+ return isdup
+
+ def _write_item(self, key: KT, val: VT, dedup_result: _DedupResult) -> _WriteResult:
+ # Overridden by _orderedbase.OrderedBidictBase.
+ isdupkey, isdupval, oldkey, oldval = dedup_result
+ fwdm = self._fwdm
+ invm = self._invm
+ fwdm[key] = val
+ invm[val] = key
+ if isdupkey:
+ del invm[oldval]
+ if isdupval:
+ del fwdm[oldkey]
+ return _WriteResult(key, val, oldkey, oldval)
+
+ def _update(self, init: bool, on_dup: OnDup, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
+ # args[0] may be a generator that yields many items, so process input in a single pass.
+ if not args and not kw:
+ return
+ can_skip_dup_check = not self and not kw and isinstance(args[0], BidirectionalMapping)
+ if can_skip_dup_check:
+ self._update_no_dup_check(args[0]) # type: ignore
+ return
+ can_skip_rollback = init or RAISE not in on_dup
+ if can_skip_rollback:
+ self._update_no_rollback(on_dup, *args, **kw)
+ else:
+ self._update_with_rollback(on_dup, *args, **kw)
+
+ def _update_no_dup_check(self, other: BidirectionalMapping[KT, VT]) -> None:
+ write_item = self._write_item
+ for (key, val) in other.items():
+ write_item(key, val, _NODUP)
+
+ def _update_no_rollback(self, on_dup: OnDup, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
+ put = self._put
+ for (key, val) in _iteritems_args_kw(*args, **kw):
+ put(key, val, on_dup)
+
+ def _update_with_rollback(self, on_dup: OnDup, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
+ """Update, rolling back on failure."""
+ writes: _t.List[_t.Tuple[_DedupResult, _WriteResult]] = []
+ append_write = writes.append
+ dedup_item = self._dedup_item
+ write_item = self._write_item
+ for (key, val) in _iteritems_args_kw(*args, **kw):
+ try:
+ dedup_result = dedup_item(key, val, on_dup)
+ except DuplicationError:
+ undo_write = self._undo_write
+ for dedup_result, write_result in reversed(writes):
+ undo_write(dedup_result, write_result)
+ raise
+ if dedup_result is not None:
+ write_result = write_item(key, val, dedup_result)
+ append_write((dedup_result, write_result))
+
+ def _undo_write(self, dedup_result: _DedupResult, write_result: _WriteResult) -> None:
+ isdupkey, isdupval, _, _ = dedup_result
+ key, val, oldkey, oldval = write_result
+ if not isdupkey and not isdupval:
+ self._pop(key)
+ return
+ fwdm = self._fwdm
+ invm = self._invm
+ if isdupkey:
+ fwdm[key] = oldval
+ invm[oldval] = key
+ if not isdupval:
+ del invm[val]
+ if isdupval:
+ invm[val] = oldkey
+ fwdm[oldkey] = val
+ if not isdupkey:
+ del fwdm[key]
+
+ def copy(self: BT) -> BT:
+ """A shallow copy."""
+ # Could just ``return self.__class__(self)`` here instead, but the below is faster. It uses
+ # __new__ to create a copy instance while bypassing its __init__, which would result
+ # in copying this bidict's items into the copy instance one at a time. Instead, make whole
+ # copies of each of the backing mappings, and make them the backing mappings of the copy,
+ # avoiding copying items one at a time.
+ cp = self.__class__.__new__(self.__class__)
+ cp._fwdm = copy(self._fwdm)
+ cp._invm = copy(self._invm)
+ cp._init_inv()
+ return cp # type: ignore
+
+ #: Used for the copy protocol.
+ #: *See also* the :mod:`copy` module
+ __copy__ = copy
+
+ def __len__(self) -> int:
+ """The number of contained items."""
+ return len(self._fwdm)
+
+ def __iter__(self) -> _t.Iterator[KT]:
+ """Iterator over the contained keys."""
+ return iter(self._fwdm)
+
+ def __getitem__(self, key: KT) -> VT:
+ """*x.__getitem__(key) ⟺ x[key]*"""
+ return self._fwdm[key]
+
+
+# Work around weakref slot with Generics bug on Python 3.6 (https://bugs.python.org/issue41451):
+BidictBase.__slots__.remove('__weakref__')
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _abc.py Current: _base.py Next: _frozenbidict.py →
+#==============================================================================
diff --git a/matteo_env/Lib/site-packages/bidict/_bidict.py b/matteo_env/Lib/site-packages/bidict/_bidict.py
new file mode 100644
index 0000000..5f559c3
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_bidict.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+#==============================================================================
+# * Welcome to the bidict source code *
+#==============================================================================
+
+# Doing a code review? You'll find a "Code review nav" comment like the one
+# below at the top and bottom of the most important source files. This provides
+# a suggested initial path through the source when reviewing.
+#
+# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
+# viewing an outdated version of the code. Please head to GitHub to review the
+# latest version, which contains important improvements over older versions.
+#
+# Thank you for reading and for any feedback you provide.
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _mut.py Current: _bidict.py Next: _orderedbase.py →
+#==============================================================================
+
+
+"""Provide :class:`bidict`."""
+
+import typing as _t
+
+from ._delegating import _DelegatingBidict
+from ._mut import MutableBidict
+from ._typing import KT, VT
+
+
+class bidict(_DelegatingBidict[KT, VT], MutableBidict[KT, VT]):
+ """Base class for mutable bidirectional mappings."""
+
+ __slots__ = ()
+
+ if _t.TYPE_CHECKING:
+ @property
+ def inverse(self) -> 'bidict[VT, KT]': ...
+
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _mut.py Current: _bidict.py Next: _orderedbase.py →
+#==============================================================================
diff --git a/matteo_env/Lib/site-packages/bidict/_delegating.py b/matteo_env/Lib/site-packages/bidict/_delegating.py
new file mode 100644
index 0000000..a70183d
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_delegating.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+"""Provide :class:`_DelegatingBidict`."""
+
+import typing as _t
+
+from ._base import BidictBase
+from ._typing import KT, VT
+
+
+class _DelegatingBidict(BidictBase[KT, VT]):
+ """Provide optimized implementations of several methods by delegating to backing dicts.
+
+ Used to override less efficient implementations inherited by :class:`~collections.abc.Mapping`.
+ """
+
+ __slots__ = ()
+
+ def __iter__(self) -> _t.Iterator[KT]:
+ """Iterator over the contained keys."""
+ return iter(self._fwdm)
+
+ def keys(self) -> _t.KeysView[KT]:
+ """A set-like object providing a view on the contained keys."""
+ return self._fwdm.keys()
+
+ def values(self) -> _t.KeysView[VT]: # type: ignore # https://github.com/python/typeshed/issues/4435
+ """A set-like object providing a view on the contained values."""
+ return self._invm.keys()
+
+ def items(self) -> _t.ItemsView[KT, VT]:
+ """A set-like object providing a view on the contained items."""
+ return self._fwdm.items()
diff --git a/matteo_env/Lib/site-packages/bidict/_dup.py b/matteo_env/Lib/site-packages/bidict/_dup.py
new file mode 100644
index 0000000..ca44d2c
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_dup.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+"""Provide :class:`OnDup` and related functionality."""
+
+
+from collections import namedtuple
+from enum import Enum
+
+
+class OnDupAction(Enum):
+ """An action to take to prevent duplication from occurring."""
+
+ #: Raise a :class:`~bidict.DuplicationError`.
+ RAISE = 'RAISE'
+ #: Overwrite existing items with new items.
+ DROP_OLD = 'DROP_OLD'
+ #: Keep existing items and drop new items.
+ DROP_NEW = 'DROP_NEW'
+
+ def __repr__(self) -> str:
+ return f'<{self.name}>'
+
+
+RAISE = OnDupAction.RAISE
+DROP_OLD = OnDupAction.DROP_OLD
+DROP_NEW = OnDupAction.DROP_NEW
+
+
+class OnDup(namedtuple('_OnDup', 'key val kv')):
+ r"""A 3-tuple of :class:`OnDupAction`\s specifying how to handle the 3 kinds of duplication.
+
+ *See also* :ref:`basic-usage:Values Must Be Unique`
+
+ If *kv* is not specified, *val* will be used for *kv*.
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, key: OnDupAction = DROP_OLD, val: OnDupAction = RAISE, kv: OnDupAction = RAISE) -> 'OnDup':
+ """Override to provide user-friendly default values."""
+ return super().__new__(cls, key, val, kv or val)
+
+
+#: Default :class:`OnDup` used for the
+#: :meth:`~bidict.bidict.__init__`,
+#: :meth:`~bidict.bidict.__setitem__`, and
+#: :meth:`~bidict.bidict.update` methods.
+ON_DUP_DEFAULT = OnDup()
+#: An :class:`OnDup` whose members are all :obj:`RAISE`.
+ON_DUP_RAISE = OnDup(key=RAISE, val=RAISE, kv=RAISE)
+#: An :class:`OnDup` whose members are all :obj:`DROP_OLD`.
+ON_DUP_DROP_OLD = OnDup(key=DROP_OLD, val=DROP_OLD, kv=DROP_OLD)
diff --git a/matteo_env/Lib/site-packages/bidict/_exc.py b/matteo_env/Lib/site-packages/bidict/_exc.py
new file mode 100644
index 0000000..dd38d75
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_exc.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+"""Provide all bidict exceptions."""
+
+
+class BidictException(Exception):
+ """Base class for bidict exceptions."""
+
+
+class DuplicationError(BidictException):
+ """Base class for exceptions raised when uniqueness is violated
+ as per the :attr:`~bidict.RAISE` :class:`~bidict.OnDupAction`.
+ """
+
+
+class KeyDuplicationError(DuplicationError):
+ """Raised when a given key is not unique."""
+
+
+class ValueDuplicationError(DuplicationError):
+ """Raised when a given value is not unique."""
+
+
+class KeyAndValueDuplicationError(KeyDuplicationError, ValueDuplicationError):
+ """Raised when a given item's key and value are not unique.
+
+ That is, its key duplicates that of another item,
+ and its value duplicates that of a different other item.
+ """
diff --git a/matteo_env/Lib/site-packages/bidict/_frozenbidict.py b/matteo_env/Lib/site-packages/bidict/_frozenbidict.py
new file mode 100644
index 0000000..d42ca4c
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_frozenbidict.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+#==============================================================================
+# * Welcome to the bidict source code *
+#==============================================================================
+
+# Doing a code review? You'll find a "Code review nav" comment like the one
+# below at the top and bottom of the most important source files. This provides
+# a suggested initial path through the source when reviewing.
+#
+# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
+# viewing an outdated version of the code. Please head to GitHub to review the
+# latest version, which contains important improvements over older versions.
+#
+# Thank you for reading and for any feedback you provide.
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _base.py Current: _frozenbidict.py Next: _mut.py →
+#==============================================================================
+
+"""Provide :class:`frozenbidict`, an immutable, hashable bidirectional mapping type."""
+
+import typing as _t
+
+from ._delegating import _DelegatingBidict
+from ._typing import KT, VT
+
+
+class frozenbidict(_DelegatingBidict[KT, VT]):
+ """Immutable, hashable bidict type."""
+
+ __slots__ = ()
+
+ # Work around lack of support for higher-kinded types in mypy.
+ # Ref: https://github.com/python/typing/issues/548#issuecomment-621571821
+ # Remove this and similar type stubs from other classes if support is ever added.
+ if _t.TYPE_CHECKING:
+ @property
+ def inverse(self) -> 'frozenbidict[VT, KT]': ...
+
+ def __hash__(self) -> int:
+ """The hash of this bidict as determined by its items."""
+ if getattr(self, '_hash', None) is None:
+ self._hash = _t.ItemsView(self)._hash() # type: ignore
+ return self._hash # type: ignore
+
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _base.py Current: _frozenbidict.py Next: _mut.py →
+#==============================================================================
diff --git a/matteo_env/Lib/site-packages/bidict/_frozenordered.py b/matteo_env/Lib/site-packages/bidict/_frozenordered.py
new file mode 100644
index 0000000..bd79c1e
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_frozenordered.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+#==============================================================================
+# * Welcome to the bidict source code *
+#==============================================================================
+
+# Doing a code review? You'll find a "Code review nav" comment like the one
+# below at the top and bottom of the most important source files. This provides
+# a suggested initial path through the source when reviewing.
+#
+# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
+# viewing an outdated version of the code. Please head to GitHub to review the
+# latest version, which contains important improvements over older versions.
+#
+# Thank you for reading and for any feedback you provide.
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _orderedbase.py Current: _frozenordered.py Next: _orderedbidict.py →
+#==============================================================================
+
+"""Provide :class:`FrozenOrderedBidict`, an immutable, hashable, ordered bidict."""
+
+import typing as _t
+
+from ._frozenbidict import frozenbidict
+from ._orderedbase import OrderedBidictBase
+from ._typing import KT, VT
+
+
+class FrozenOrderedBidict(OrderedBidictBase[KT, VT]):
+ """Hashable, immutable, ordered bidict type."""
+
+ __slots__ = ()
+ __hash__ = frozenbidict.__hash__
+
+ if _t.TYPE_CHECKING:
+ @property
+ def inverse(self) -> 'FrozenOrderedBidict[VT, KT]': ...
+
+ # Assume the Python implementation's dict type is ordered (e.g. PyPy or CPython >= 3.6), so we
+ # can delegate to `_fwdm` and `_invm` for faster implementations of several methods. Both
+ # `_fwdm` and `_invm` will always be initialized with the provided items in the correct order,
+ # and since `FrozenOrderedBidict` is immutable, their respective orders can't get out of sync
+ # after a mutation.
+ def __iter__(self) -> _t.Iterator[KT]:
+ """Iterator over the contained keys in insertion order."""
+ return self._iter()
+
+ def _iter(self, *, reverse: bool = False) -> _t.Iterator[KT]:
+ if reverse:
+ return super()._iter(reverse=True)
+ return iter(self._fwdm._fwdm)
+
+ def keys(self) -> _t.KeysView[KT]:
+ """A set-like object providing a view on the contained keys."""
+ return self._fwdm._fwdm.keys()
+
+ def values(self) -> _t.KeysView[VT]: # type: ignore
+ """A set-like object providing a view on the contained values."""
+ return self._invm._fwdm.keys()
+
+ # We can't delegate for items because values in `_fwdm` are nodes.
+
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _orderedbase.py Current: _frozenordered.py Next: _orderedbidict.py →
+#==============================================================================
diff --git a/matteo_env/Lib/site-packages/bidict/_iter.py b/matteo_env/Lib/site-packages/bidict/_iter.py
new file mode 100644
index 0000000..32d4279
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_iter.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+"""Functions for iterating over items in a mapping."""
+
+import typing as _t
+from collections.abc import Mapping
+from itertools import chain, repeat
+
+from ._typing import KT, VT, IterItems, MapOrIterItems
+
+
+_NULL_IT = repeat(None, 0) # repeat 0 times -> raise StopIteration from the start
+
+
+def _iteritems_mapping_or_iterable(arg: MapOrIterItems[KT, VT]) -> IterItems[KT, VT]:
+ """Yield the items in *arg*.
+
+ If *arg* is a :class:`~collections.abc.Mapping`, return an iterator over its items.
+ Otherwise return an iterator over *arg* itself.
+ """
+ return iter(arg.items() if isinstance(arg, Mapping) else arg)
+
+
+def _iteritems_args_kw(*args: MapOrIterItems[KT, VT], **kw: VT) -> IterItems[KT, VT]:
+ """Yield the items from the positional argument (if given) and then any from *kw*.
+
+ :raises TypeError: if more than one positional argument is given.
+ """
+ args_len = len(args)
+ if args_len > 1:
+ raise TypeError(f'Expected at most 1 positional argument, got {args_len}')
+ itemchain = None
+ if args:
+ arg = args[0]
+ if arg:
+ itemchain = _iteritems_mapping_or_iterable(arg)
+ if kw:
+ iterkw = iter(kw.items())
+ itemchain = chain(itemchain, iterkw) if itemchain else iterkw # type: ignore
+ return itemchain or _NULL_IT # type: ignore
+
+
+@_t.overload
+def inverted(arg: _t.Mapping[KT, VT]) -> IterItems[VT, KT]: ...
+@_t.overload
+def inverted(arg: IterItems[KT, VT]) -> IterItems[VT, KT]: ...
+def inverted(arg: MapOrIterItems[KT, VT]) -> IterItems[VT, KT]:
+ """Yield the inverse items of the provided object.
+
+ If *arg* has a :func:`callable` ``__inverted__`` attribute,
+ return the result of calling it.
+
+ Otherwise, return an iterator over the items in `arg`,
+ inverting each item on the fly.
+
+ *See also* :attr:`bidict.BidirectionalMapping.__inverted__`
+ """
+ inv = getattr(arg, '__inverted__', None)
+ if callable(inv):
+ return inv() # type: ignore
+ return ((val, key) for (key, val) in _iteritems_mapping_or_iterable(arg))
diff --git a/matteo_env/Lib/site-packages/bidict/_mut.py b/matteo_env/Lib/site-packages/bidict/_mut.py
new file mode 100644
index 0000000..aa4e370
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_mut.py
@@ -0,0 +1,188 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+#==============================================================================
+# * Welcome to the bidict source code *
+#==============================================================================
+
+# Doing a code review? You'll find a "Code review nav" comment like the one
+# below at the top and bottom of the most important source files. This provides
+# a suggested initial path through the source when reviewing.
+#
+# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
+# viewing an outdated version of the code. Please head to GitHub to review the
+# latest version, which contains important improvements over older versions.
+#
+# Thank you for reading and for any feedback you provide.
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _frozenbidict.py Current: _mut.py Next: _bidict.py →
+#==============================================================================
+
+
+"""Provide :class:`MutableBidict`."""
+
+import typing as _t
+
+from ._abc import MutableBidirectionalMapping
+from ._base import BidictBase
+from ._dup import OnDup, ON_DUP_RAISE, ON_DUP_DROP_OLD
+from ._typing import _NONE, KT, VT, VDT, IterItems, MapOrIterItems
+
+
class MutableBidict(BidictBase[KT, VT], MutableBidirectionalMapping[KT, VT]):
    """Base class for mutable bidirectional mappings."""

    # NOTE(review): the write primitives used below (_put, _pop, _update) are
    # inherited from BidictBase (defined in _base.py, not visible here); this
    # class only layers the MutableMapping-style API on top of them.

    __slots__ = ()

    if _t.TYPE_CHECKING:
        @property
        def inverse(self) -> 'MutableBidict[VT, KT]': ...

    def __delitem__(self, key: KT) -> None:
        """*x.__delitem__(y) ⟺ del x[y]*"""
        self._pop(key)

    def __setitem__(self, key: KT, val: VT) -> None:
        """Set the value for *key* to *val*.

        If *key* is already associated with *val*, this is a no-op.

        If *key* is already associated with a different value,
        the old value will be replaced with *val*,
        as with dict's :meth:`__setitem__`.

        If *val* is already associated with a different key,
        an exception is raised
        to protect against accidental removal of the key
        that's currently associated with *val*.

        Use :meth:`put` instead if you want to specify different behavior in
        the case that the provided key or value duplicates an existing one.
        Or use :meth:`forceput` to unconditionally associate *key* with *val*,
        replacing any existing items as necessary to preserve uniqueness.

        :raises bidict.ValueDuplicationError: if *val* duplicates that of an
            existing item.

        :raises bidict.KeyAndValueDuplicationError: if *key* duplicates the key of an
            existing item and *val* duplicates the value of a different
            existing item.
        """
        # self.on_dup holds this instance's default duplication policy.
        self._put(key, val, self.on_dup)

    def put(self, key: KT, val: VT, on_dup: OnDup = ON_DUP_RAISE) -> None:
        """Associate *key* with *val*, honoring the :class:`OnDup` given in *on_dup*.

        For example, if *on_dup* is :attr:`~bidict.ON_DUP_RAISE`,
        then *key* will be associated with *val* if and only if
        *key* is not already associated with an existing value and
        *val* is not already associated with an existing key,
        otherwise an exception will be raised.

        If *key* is already associated with *val*, this is a no-op.

        :raises bidict.KeyDuplicationError: if attempting to insert an item
            whose key only duplicates an existing item's, and *on_dup.key* is
            :attr:`~bidict.RAISE`.

        :raises bidict.ValueDuplicationError: if attempting to insert an item
            whose value only duplicates an existing item's, and *on_dup.val* is
            :attr:`~bidict.RAISE`.

        :raises bidict.KeyAndValueDuplicationError: if attempting to insert an
            item whose key duplicates one existing item's, and whose value
            duplicates another existing item's, and *on_dup.kv* is
            :attr:`~bidict.RAISE`.
        """
        self._put(key, val, on_dup)

    def forceput(self, key: KT, val: VT) -> None:
        """Associate *key* with *val* unconditionally.

        Replace any existing mappings containing key *key* or value *val*
        as necessary to preserve uniqueness.
        """
        self._put(key, val, ON_DUP_DROP_OLD)

    def clear(self) -> None:
        """Remove all items."""
        # Clear both backing one-directional mappings to keep them in sync.
        self._fwdm.clear()
        self._invm.clear()

    @_t.overload
    def pop(self, key: KT) -> VT: ...
    @_t.overload
    def pop(self, key: KT, default: VDT = ...) -> VDT: ...
    def pop(self, key: KT, default: VDT = _NONE) -> VDT:
        """*x.pop(k[, d]) → v*

        Remove specified key and return the corresponding value.

        :raises KeyError: if *key* is not found and no *default* is provided.
        """
        try:
            return self._pop(key)
        except KeyError:
            # _NONE is a sentinel so callers may pass None as a real default.
            if default is _NONE:
                raise
            return default

    def popitem(self) -> _t.Tuple[KT, VT]:
        """*x.popitem() → (k, v)*

        Remove and return some item as a (key, value) pair.

        :raises KeyError: if *x* is empty.
        """
        if not self:
            raise KeyError('mapping is empty')
        # Remove from both backing mappings so they stay consistent.
        key, val = self._fwdm.popitem()
        del self._invm[val]
        return key, val

    @_t.overload
    def update(self, __arg: _t.Mapping[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def update(self, __arg: IterItems[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def update(self, **kw: VT) -> None: ...
    def update(self, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
        """Like calling :meth:`putall` with *self.on_dup* passed for *on_dup*."""
        # Skip the update machinery entirely for a no-argument call.
        if args or kw:
            self._update(False, self.on_dup, *args, **kw)

    @_t.overload
    def forceupdate(self, __arg: _t.Mapping[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def forceupdate(self, __arg: IterItems[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def forceupdate(self, **kw: VT) -> None: ...
    def forceupdate(self, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
        """Like a bulk :meth:`forceput`."""
        self._update(False, ON_DUP_DROP_OLD, *args, **kw)

    @_t.overload
    def putall(self, items: _t.Mapping[KT, VT], on_dup: OnDup) -> None: ...
    @_t.overload
    def putall(self, items: IterItems[KT, VT], on_dup: OnDup = ON_DUP_RAISE) -> None: ...
    def putall(self, items: MapOrIterItems[KT, VT], on_dup: OnDup = ON_DUP_RAISE) -> None:
        """Like a bulk :meth:`put`.

        If one of the given items causes an exception to be raised,
        none of the items is inserted.
        """
        # Falsy *items* (e.g. an empty mapping) is a no-op.
        if items:
            self._update(False, on_dup, items)
+
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _frozenbidict.py Current: _mut.py Next: _bidict.py →
+#==============================================================================
diff --git a/matteo_env/Lib/site-packages/bidict/_named.py b/matteo_env/Lib/site-packages/bidict/_named.py
new file mode 100644
index 0000000..7a22722
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_named.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""Provide :func:`bidict.namedbidict`."""
+
+import typing as _t
+from sys import _getframe
+
+from ._abc import BidirectionalMapping, KT, VT
+from ._bidict import bidict
+
+
def namedbidict(
    typename: str,
    keyname: str,
    valname: str,
    *,
    base_type: _t.Type[BidirectionalMapping[KT, VT]] = bidict,
) -> _t.Type[BidirectionalMapping[KT, VT]]:
    r"""Create a new subclass of *base_type* with custom accessors.

    Like :func:`collections.namedtuple` for bidicts.

    The new class's ``__name__`` and ``__qualname__`` will be set to *typename*,
    and its ``__module__`` will be set to the caller's module.

    Instances of the new class will provide access to their
    :attr:`inverse <BidirectionalMapping.inverse>` instances
    via the custom *keyname*\_for property,
    and access to themselves
    via the custom *valname*\_for property.

    *See also* the :ref:`namedbidict usage documentation
    <other-bidict-types:\:func\:\`~bidict.namedbidict\`>`

    :raises ValueError: if any of the *typename*, *keyname*, or *valname*
        strings is not a valid Python identifier, or if *keyname == valname*.

    :raises TypeError: if *base_type* is not a :class:`BidirectionalMapping` subclass
        that provides ``_isinv`` and :meth:`~object.__getstate__` attributes.
        (Any :class:`~bidict.BidictBase` subclass can be passed in, including all the
        concrete bidict types pictured in the :ref:`other-bidict-types:Bidict Types Diagram`.)
    """
    # _isinv and __getstate__ are required by the accessors and by the
    # pickling support (__reduce__) defined below.
    if not issubclass(base_type, BidirectionalMapping) or not all(hasattr(base_type, i) for i in ('_isinv', '__getstate__')):
        raise TypeError(base_type)
    names = (typename, keyname, valname)
    if not all(map(str.isidentifier, names)) or keyname == valname:
        raise ValueError(names)

    class _Named(base_type):  # type: ignore

        __slots__ = ()

        def _getfwd(self) -> '_Named':
            # The forward-direction instance: self, or self's inverse if self
            # is itself an inverse.
            return self.inverse if self._isinv else self  # type: ignore

        def _getinv(self) -> '_Named':
            return self if self._isinv else self.inverse  # type: ignore

        @property
        def _keyname(self) -> str:
            # Names swap on the inverse so its accessors read correctly.
            return valname if self._isinv else keyname

        @property
        def _valname(self) -> str:
            return keyname if self._isinv else valname

        def __reduce__(self) -> '_t.Tuple[_t.Callable[[str, str, str, _t.Type[BidirectionalMapping]], BidirectionalMapping], _t.Tuple[str, str, str, _t.Type[BidirectionalMapping]], dict]':
            # Pickle via _make_empty: _Named is created dynamically, so pickle
            # could not otherwise find it by qualified name.
            return (_make_empty, (typename, keyname, valname, base_type), self.__getstate__())

    bname = base_type.__name__
    fname = valname + '_for'
    iname = keyname + '_for'
    fdoc = f'{typename} forward {bname}: {keyname} → {valname}'
    idoc = f'{typename} inverse {bname}: {valname} → {keyname}'
    setattr(_Named, fname, property(_Named._getfwd, doc=fdoc))
    setattr(_Named, iname, property(_Named._getinv, doc=idoc))

    _Named.__name__ = typename
    _Named.__qualname__ = typename
    # Like namedtuple: report the *caller's* module, not this one.
    _Named.__module__ = _getframe(1).f_globals.get('__name__')  # type: ignore
    return _Named
+
+
def _make_empty(
    typename: str,
    keyname: str,
    valname: str,
    base_type: _t.Type[BidirectionalMapping] = bidict,
) -> BidirectionalMapping:
    """Return an empty instance of a freshly-created named bidict class.

    This is the reconstruction callable referenced by ``_Named.__reduce__``;
    it exists so that :func:`bidict.namedbidict` instances are picklable.
    """
    named_cls = namedbidict(typename, keyname, valname, base_type=base_type)
    return named_cls()
diff --git a/matteo_env/Lib/site-packages/bidict/_orderedbase.py b/matteo_env/Lib/site-packages/bidict/_orderedbase.py
new file mode 100644
index 0000000..4bb6890
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_orderedbase.py
@@ -0,0 +1,314 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+#==============================================================================
+# * Welcome to the bidict source code *
+#==============================================================================
+
+# Doing a code review? You'll find a "Code review nav" comment like the one
+# below at the top and bottom of the most important source files. This provides
+# a suggested initial path through the source when reviewing.
+#
+# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
+# viewing an outdated version of the code. Please head to GitHub to review the
+# latest version, which contains important improvements over older versions.
+#
+# Thank you for reading and for any feedback you provide.
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _bidict.py Current: _orderedbase.py Next: _frozenordered.py →
+#==============================================================================
+
+
+"""Provide :class:`OrderedBidictBase`."""
+
+import typing as _t
+from copy import copy
+from weakref import ref
+
+from ._base import _NONE, _DedupResult, _WriteResult, BidictBase, BT
+from ._bidict import bidict
+from ._typing import KT, VT, IterItems, MapOrIterItems
+
+
class _Node:
    """A node in a circular doubly-linked list
    used to encode the order of items in an ordered bidict.

    Only weak references to the next and previous nodes
    are held to avoid creating strong reference cycles.

    Because an ordered bidict retains two strong references
    to each node instance (one from its backing `_fwdm` mapping
    and one from its `_invm` mapping), a node's refcount will not
    drop to zero (and so will not be garbage collected) as long as
    the ordered bidict that contains it is still alive.
    Because nodes don't have strong reference cycles,
    once their containing bidict is freed,
    they too are immediately freed.
    """

    __slots__ = ('_prv', '_nxt', '__weakref__')

    def __init__(self, prv: '_t.Optional[_Node]' = None, nxt: '_t.Optional[_Node]' = None) -> None:
        self._setprv(prv)
        self._setnxt(nxt)

    def __repr__(self) -> str:
        # Show object ids rather than recursing into neighbors (the list is
        # circular, so a naive repr would never terminate).
        clsname = self.__class__.__name__
        prv = id(self.prv)
        nxt = id(self.nxt)
        return f'{clsname}(prv={prv}, self={id(self)}, nxt={nxt})'

    def _getprv(self) -> '_t.Optional[_Node]':
        # _prv is either a weakref (dereference it) or a plain/None value
        # (e.g. after __setstate__ stored a strong ref, or at init time).
        return self._prv() if isinstance(self._prv, ref) else self._prv

    def _setprv(self, prv: '_t.Optional[_Node]') -> None:
        # `x and ref(x)` maps None -> None and a node -> weakref to it.
        self._prv = prv and ref(prv)

    prv = property(_getprv, _setprv)

    def _getnxt(self) -> '_t.Optional[_Node]':
        return self._nxt() if isinstance(self._nxt, ref) else self._nxt

    def _setnxt(self, nxt: '_t.Optional[_Node]') -> None:
        self._nxt = nxt and ref(nxt)

    nxt = property(_getnxt, _setnxt)

    def __getstate__(self) -> dict:
        """Return the instance state dictionary
        but with weakrefs converted to strong refs
        so that it can be pickled.

        *See also* :meth:`object.__getstate__`
        """
        return dict(_prv=self.prv, _nxt=self.nxt)

    def __setstate__(self, state: dict) -> None:
        """Set the instance state from *state*."""
        # Going through the setters restores weakrefs from the strong refs
        # that __getstate__ stored.
        self._setprv(state['_prv'])
        self._setnxt(state['_nxt'])
+
+
class _SentinelNode(_Node):
    """Special node in a circular doubly-linked list
    that links the first node with the last node.
    When its next and previous references point back to itself
    it represents an empty list.
    """

    __slots__ = ()

    def __init__(self, prv: '_t.Optional[_Node]' = None, nxt: '_t.Optional[_Node]' = None) -> None:
        # Default both links to self, i.e. an empty list.
        super().__init__(prv or self, nxt or self)

    def __repr__(self) -> str:
        # Fixed: the string literal had been reduced to '' (its angle-bracket
        # content was stripped, as in several docstrings in these files);
        # restore the upstream sentinel marker so debug output can
        # distinguish the sentinel from real nodes.
        return '<SNTL>'

    def __bool__(self) -> bool:
        # Falsy so that traversals can use "while node:" to stop at the
        # sentinel (see _iter below and _Node consumers).
        return False

    def _iter(self, *, reverse: bool = False) -> _t.Iterator[_Node]:
        """Iterator yielding nodes in the requested order,
        i.e. traverse the linked list via :attr:`nxt`
        (or :attr:`prv` if *reverse* is truthy)
        until reaching a falsy (i.e. sentinel) node.
        """
        attr = 'prv' if reverse else 'nxt'
        node = getattr(self, attr)
        while node:
            yield node
            node = getattr(node, attr)
+
+
class OrderedBidictBase(BidictBase[KT, VT]):
    """Base class implementing an ordered :class:`BidirectionalMapping`."""

    __slots__ = ('_sntl',)

    # The backing mappings are bidicts (not plain dicts) because their
    # .inverse is used below to go from a node back to its key or value.
    _fwdm_cls = bidict  # type: ignore
    _invm_cls = bidict  # type: ignore

    #: The object used by :meth:`__repr__` for printing the contained items.
    _repr_delegate = list  # type: ignore

    @_t.overload
    def __init__(self, __arg: _t.Mapping[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def __init__(self, __arg: IterItems[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def __init__(self, **kw: VT) -> None: ...
    def __init__(self, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
        """Make a new ordered bidirectional mapping.
        The signature behaves like that of :class:`dict`.
        Items passed in are added in the order they are passed,
        respecting the :attr:`on_dup` class attribute in the process.

        The order in which items are inserted is remembered,
        similar to :class:`collections.OrderedDict`.
        """
        # The sentinel anchors the circular doubly-linked list that records
        # insertion order. It must exist before super().__init__ performs any
        # writes (which go through _write_item below).
        self._sntl = _SentinelNode()

        # Like unordered bidicts, ordered bidicts also store two backing one-directional mappings
        # `_fwdm` and `_invm`. But rather than mapping `key` to `val` and `val` to `key`
        # (respectively), they map `key` to `nodefwd` and `val` to `nodeinv` (respectively), where
        # `nodefwd` is `nodeinv` when `key` and `val` are associated with one another.

        # To effect this difference, `_write_item` and `_undo_write` are overridden. But much of the
        # rest of BidictBase's implementation, including BidictBase.__init__ and BidictBase._update,
        # are inherited and are able to be reused without modification.
        super().__init__(*args, **kw)

    if _t.TYPE_CHECKING:
        @property
        def inverse(self) -> 'OrderedBidictBase[VT, KT]': ...
        _fwdm: bidict[KT, _Node]  # type: ignore
        _invm: bidict[VT, _Node]  # type: ignore

    def _init_inv(self) -> None:
        super()._init_inv()
        # The inverse view shares this bidict's sentinel (and thus its order).
        self.inverse._sntl = self._sntl

    # Can't reuse BidictBase.copy since ordered bidicts have different internal structure.
    def copy(self: BT) -> BT:
        """A shallow copy of this ordered bidict."""
        # Fast copy implementation bypassing __init__. See comments in :meth:`BidictBase.copy`.
        cp = self.__class__.__new__(self.__class__)
        sntl = _SentinelNode()
        fwdm = copy(self._fwdm)
        invm = copy(self._invm)
        # Rebuild a fresh linked list of new nodes (nodes can't be shared with
        # self), overwriting the copied mappings' node values as we go.
        cur = sntl
        nxt = sntl.nxt
        for (key, val) in self.items():
            nxt = _Node(cur, sntl)
            cur.nxt = fwdm[key] = invm[val] = nxt
            cur = nxt
        sntl.prv = nxt
        cp._sntl = sntl
        cp._fwdm = fwdm
        cp._invm = invm
        cp._init_inv()
        return cp  # type: ignore

    __copy__ = copy

    def __getitem__(self, key: KT) -> VT:
        # key -> node via _fwdm, then node -> val via _invm's inverse.
        nodefwd = self._fwdm[key]
        val = self._invm.inverse[nodefwd]
        return val

    def _pop(self, key: KT) -> VT:
        nodefwd = self._fwdm.pop(key)
        val = self._invm.inverse.pop(nodefwd)
        # Unlink the removed node from the order-tracking list.
        nodefwd.prv.nxt = nodefwd.nxt
        nodefwd.nxt.prv = nodefwd.prv
        return val

    @staticmethod
    def _already_have(key: KT, val: VT, nodeinv: _Node, nodefwd: _Node) -> bool:  # type: ignore
        # Overrides _base.BidictBase.
        # An item is already present iff its key and value map to one node.
        return nodeinv is nodefwd

    def _write_item(self, key: KT, val: VT, dedup_result: _DedupResult) -> _WriteResult:
        # Overrides _base.BidictBase.
        fwdm = self._fwdm  # bidict mapping keys to nodes
        invm = self._invm  # bidict mapping vals to nodes
        isdupkey, isdupval, nodeinv, nodefwd = dedup_result
        if not isdupkey and not isdupval:
            # No key or value duplication -> create and append a new node.
            sntl = self._sntl
            last = sntl.prv
            node = _Node(last, sntl)
            last.nxt = sntl.prv = fwdm[key] = invm[val] = node
            oldkey = oldval = _NONE
        elif isdupkey and isdupval:
            # Key and value duplication across two different nodes.
            assert nodefwd is not nodeinv
            oldval = invm.inverse[nodefwd]  # type: ignore
            oldkey = fwdm.inverse[nodeinv]  # type: ignore
            assert oldkey != key
            assert oldval != val
            # We have to collapse nodefwd and nodeinv into a single node, i.e. drop one of them.
            # Drop nodeinv, so that the item with the same key is the one overwritten in place.
            nodeinv.prv.nxt = nodeinv.nxt
            nodeinv.nxt.prv = nodeinv.prv
            # Don't remove nodeinv's references to its neighbors since
            # if the update fails, we'll need them to undo this write.
            # Update fwdm and invm.
            tmp = fwdm.pop(oldkey)  # type: ignore
            assert tmp is nodeinv
            tmp = invm.pop(oldval)  # type: ignore
            assert tmp is nodefwd
            fwdm[key] = invm[val] = nodefwd
        elif isdupkey:
            # Same key, new value: reuse the key's node in place.
            oldval = invm.inverse[nodefwd]  # type: ignore
            oldkey = _NONE
            oldnodeinv = invm.pop(oldval)  # type: ignore
            assert oldnodeinv is nodefwd
            invm[val] = nodefwd
        else:  # isdupval
            # Same value, new key: reuse the value's node in place.
            oldkey = fwdm.inverse[nodeinv]  # type: ignore
            oldval = _NONE
            oldnodefwd = fwdm.pop(oldkey)  # type: ignore
            assert oldnodefwd is nodeinv
            fwdm[key] = nodeinv
        return _WriteResult(key, val, oldkey, oldval)

    def _undo_write(self, dedup_result: _DedupResult, write_result: _WriteResult) -> None:
        # Reverses the corresponding branch of _write_item above.
        fwdm = self._fwdm
        invm = self._invm
        isdupkey, isdupval, nodeinv, nodefwd = dedup_result
        key, val, oldkey, oldval = write_result
        if not isdupkey and not isdupval:
            self._pop(key)
        elif isdupkey and isdupval:
            # Restore original items.
            nodeinv.prv.nxt = nodeinv.nxt.prv = nodeinv
            fwdm[oldkey] = invm[val] = nodeinv
            invm[oldval] = fwdm[key] = nodefwd
        elif isdupkey:
            tmp = invm.pop(val)
            assert tmp is nodefwd
            invm[oldval] = nodefwd
            assert fwdm[key] is nodefwd
        else:  # isdupval
            tmp = fwdm.pop(key)
            assert tmp is nodeinv
            fwdm[oldkey] = nodeinv
            assert invm[val] is nodeinv

    def __iter__(self) -> _t.Iterator[KT]:
        """Iterator over the contained keys in insertion order."""
        return self._iter()

    def _iter(self, *, reverse: bool = False) -> _t.Iterator[KT]:
        # Walk the linked list, translating each node back to its key.
        fwdm_inv = self._fwdm.inverse
        for node in self._sntl._iter(reverse=reverse):
            yield fwdm_inv[node]

    def __reversed__(self) -> _t.Iterator[KT]:
        """Iterator over the contained keys in reverse insertion order."""
        yield from self._iter(reverse=True)

    def equals_order_sensitive(self, other: object) -> bool:
        """Order-sensitive equality check.

        *See also* :ref:`eq-order-insensitive`
        """
        # Same short-circuit as BidictBase.__eq__. Factoring out not worth function call overhead.
        if not isinstance(other, _t.Mapping) or len(self) != len(other):
            return False
        return all(i == j for (i, j) in zip(self.items(), other.items()))
+
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _bidict.py Current: _orderedbase.py Next: _frozenordered.py →
+#==============================================================================
diff --git a/matteo_env/Lib/site-packages/bidict/_orderedbidict.py b/matteo_env/Lib/site-packages/bidict/_orderedbidict.py
new file mode 100644
index 0000000..b92b305
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_orderedbidict.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+#==============================================================================
+# * Welcome to the bidict source code *
+#==============================================================================
+
+# Doing a code review? You'll find a "Code review nav" comment like the one
+# below at the top and bottom of the most important source files. This provides
+# a suggested initial path through the source when reviewing.
+#
+# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
+# viewing an outdated version of the code. Please head to GitHub to review the
+# latest version, which contains important improvements over older versions.
+#
+# Thank you for reading and for any feedback you provide.
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _frozenordered.py Current: _orderedbidict.py
+#==============================================================================
+
+
+"""Provide :class:`OrderedBidict`."""
+
+import typing as _t
+
+from ._mut import MutableBidict
+from ._orderedbase import OrderedBidictBase
+from ._typing import KT, VT
+
+
class OrderedBidict(OrderedBidictBase[KT, VT], MutableBidict[KT, VT]):
    """Mutable bidict type that maintains items in insertion order."""

    __slots__ = ()

    if _t.TYPE_CHECKING:
        @property
        def inverse(self) -> 'OrderedBidict[VT, KT]': ...

    def clear(self) -> None:
        """Remove all items."""
        self._fwdm.clear()
        self._invm.clear()
        # Point the sentinel's links back at itself: the empty-list state.
        self._sntl.nxt = self._sntl.prv = self._sntl

    def popitem(self, last: bool = True) -> _t.Tuple[KT, VT]:
        """*x.popitem() → (k, v)*

        Remove and return the most recently added item as a (key, value) pair
        if *last* is True, else the least recently added item.

        :raises KeyError: if *x* is empty.
        """
        if not self:
            raise KeyError('mapping is empty')
        # Iteration is in insertion order, so first/last are well-defined.
        key = next((reversed if last else iter)(self))  # type: ignore
        val = self._pop(key)
        return key, val

    def move_to_end(self, key: KT, last: bool = True) -> None:
        """Move an existing key to the beginning or end of this ordered bidict.

        The item is moved to the end if *last* is True, else to the beginning.

        :raises KeyError: if the key does not exist
        """
        node = self._fwdm[key]
        # Unlink the node from its current position...
        node.prv.nxt = node.nxt
        node.nxt.prv = node.prv
        sntl = self._sntl
        if last:
            # ...and relink it just before the sentinel (i.e. at the end).
            lastnode = sntl.prv
            node.prv = lastnode
            node.nxt = sntl
            sntl.prv = lastnode.nxt = node
        else:
            # ...and relink it just after the sentinel (i.e. at the front).
            firstnode = sntl.nxt
            node.prv = sntl
            node.nxt = firstnode
            sntl.nxt = firstnode.prv = node
+
+
+# * Code review nav *
+#==============================================================================
+# ← Prev: _frozenordered.py Current: _orderedbidict.py
+#==============================================================================
diff --git a/matteo_env/Lib/site-packages/bidict/_typing.py b/matteo_env/Lib/site-packages/bidict/_typing.py
new file mode 100644
index 0000000..336e886
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_typing.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+"""Provide typing-related objects."""
+
+import typing as _t
+
+
KT = _t.TypeVar('KT')  #: key type
VT = _t.TypeVar('VT')  #: value type
IterItems = _t.Iterable[_t.Tuple[KT, VT]]  #: an iterable of (key, value) pairs
MapOrIterItems = _t.Union[_t.Mapping[KT, VT], IterItems[KT, VT]]  #: a mapping or an iterable of pairs

DT = _t.TypeVar('DT')  #: for default arguments
VDT = _t.Union[VT, DT]  #: a value or a caller-supplied default
+
+
+class _BareReprMeta(type):
+ def __repr__(cls) -> str:
+ return f'<{cls.__name__}>'
+
+
# The class object itself (never an instance) serves as the sentinel value,
# allowing None to remain a legitimate key/value/default.
class _NONE(metaclass=_BareReprMeta):
    """Sentinel type used to represent 'missing'."""
+
+
+OKT = _t.Union[KT, _NONE] #: optional key type
+OVT = _t.Union[VT, _NONE] #: optional value type
diff --git a/matteo_env/Lib/site-packages/bidict/_version.py b/matteo_env/Lib/site-packages/bidict/_version.py
new file mode 100644
index 0000000..7c956c8
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/_version.py
@@ -0,0 +1,4 @@
+# coding: utf-8
+# file generated by setuptools_scm
+# don't change, don't track in version control
+version = '0.21.2'
diff --git a/matteo_env/Lib/site-packages/bidict/metadata.py b/matteo_env/Lib/site-packages/bidict/metadata.py
new file mode 100644
index 0000000..5cd389f
--- /dev/null
+++ b/matteo_env/Lib/site-packages/bidict/metadata.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""Define bidict package metadata."""
+
+
# _version.py is generated by setuptools_scm (via its `write_to` param, see setup.py)
try:
    from ._version import version
except (ImportError, ValueError, SystemError):  # pragma: no cover
    # Fall back to querying the installed distribution's metadata.
    try:
        import pkg_resources
    except ImportError:
        __version__ = '0.0.0.VERSION_NOT_FOUND'
    else:
        try:
            __version__ = pkg_resources.get_distribution('bidict').version
        except pkg_resources.DistributionNotFound:
            __version__ = '0.0.0.VERSION_NOT_FOUND'
else:  # pragma: no cover
    __version__ = version

try:
    # e.g. '0.21.2' -> (0, 21, 2); any segment past the third is kept as text.
    __version_info__ = tuple(int(p) if i < 3 else p for (i, p) in enumerate(__version__.split('.')))
except Exception:  # pragma: no cover
    # Fixed typo: this previously assigned to `__vesion_info__`, leaving
    # `__version_info__` undefined whenever parsing failed.
    __version_info__ = (0, 0, 0, f'PARSE FAILURE: __version__={__version__!r}')

__author__ = 'Joshua Bronson'
__maintainer__ = 'Joshua Bronson'
__copyright__ = 'Copyright 2009-2020 Joshua Bronson'
__email__ = 'jabronson@gmail.com'

# See: ../docs/thanks.rst
__credits__ = [i.strip() for i in """
Joshua Bronson, Michael Arntzenius, Francis Carr, Gregory Ewing, Raymond Hettinger, Jozef Knaperek,
Daniel Pope, Terry Reedy, David Turner, Tom Viner, Richard Sanger, Zeyi Wang
""".split(',')]

__description__ = 'The bidirectional mapping library for Python.'
__keywords__ = 'dict dictionary mapping datastructure bimap bijection bijective ' \
    'injective inverse reverse bidirectional two-way 2-way'

__license__ = 'MPL 2.0'
__status__ = 'Beta'
__url__ = 'https://bidict.readthedocs.io'
diff --git a/matteo_env/Lib/site-packages/bidict/py.typed b/matteo_env/Lib/site-packages/bidict/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/matteo_env/Lib/site-packages/click-7.1.2.dist-info/INSTALLER b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/matteo_env/Lib/site-packages/click-7.1.2.dist-info/LICENSE.rst b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/LICENSE.rst
new file mode 100644
index 0000000..d12a849
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2014 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/matteo_env/Lib/site-packages/click-7.1.2.dist-info/METADATA b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/METADATA
new file mode 100644
index 0000000..00d6974
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/METADATA
@@ -0,0 +1,102 @@
+Metadata-Version: 2.1
+Name: click
+Version: 7.1.2
+Summary: Composable command line interface toolkit
+Home-page: https://palletsprojects.com/p/click/
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Documentation, https://click.palletsprojects.com/
+Project-URL: Code, https://github.com/pallets/click
+Project-URL: Issue tracker, https://github.com/pallets/click/issues
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
+
+\$ click\_
+==========
+
+Click is a Python package for creating beautiful command line interfaces
+in a composable way with as little code as necessary. It's the "Command
+Line Interface Creation Kit". It's highly configurable but comes with
+sensible defaults out of the box.
+
+It aims to make the process of writing command line tools quick and fun
+while also preventing any frustration caused by the inability to
+implement an intended CLI API.
+
+Click in three points:
+
+- Arbitrary nesting of commands
+- Automatic help page generation
+- Supports lazy loading of subcommands at runtime
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ $ pip install -U click
+
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+A Simple Example
+----------------
+
+.. code-block:: python
+
+ import click
+
+ @click.command()
+ @click.option("--count", default=1, help="Number of greetings.")
+ @click.option("--name", prompt="Your name", help="The person to greet.")
+ def hello(count, name):
+ """Simple program that greets NAME for a total of COUNT times."""
+ for _ in range(count):
+ click.echo(f"Hello, {name}!")
+
+ if __name__ == '__main__':
+ hello()
+
+.. code-block:: text
+
+ $ python hello.py --count=3
+ Your name: Click
+ Hello, Click!
+ Hello, Click!
+ Hello, Click!
+
+
+Donate
+------
+
+The Pallets organization develops and supports Click and other popular
+packages. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+- Website: https://palletsprojects.com/p/click/
+- Documentation: https://click.palletsprojects.com/
+- Releases: https://pypi.org/project/click/
+- Code: https://github.com/pallets/click
+- Issue tracker: https://github.com/pallets/click/issues
+- Test status: https://dev.azure.com/pallets/click/_build
+- Official chat: https://discord.gg/t6rrQZH
+
+
diff --git a/matteo_env/Lib/site-packages/click-7.1.2.dist-info/RECORD b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/RECORD
new file mode 100644
index 0000000..ad860ae
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/RECORD
@@ -0,0 +1,40 @@
+click-7.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+click-7.1.2.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475
+click-7.1.2.dist-info/METADATA,sha256=LrRgakZKV7Yg3qJqX_plu2WhFW81MzP3EqQmZhHIO8M,2868
+click-7.1.2.dist-info/RECORD,,
+click-7.1.2.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+click-7.1.2.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6
+click/__init__.py,sha256=FkyGDQ-cbiQxP_lxgUspyFYS48f2S_pTcfKPz-d_RMo,2463
+click/__pycache__/__init__.cpython-38.pyc,,
+click/__pycache__/_bashcomplete.cpython-38.pyc,,
+click/__pycache__/_compat.cpython-38.pyc,,
+click/__pycache__/_termui_impl.cpython-38.pyc,,
+click/__pycache__/_textwrap.cpython-38.pyc,,
+click/__pycache__/_unicodefun.cpython-38.pyc,,
+click/__pycache__/_winconsole.cpython-38.pyc,,
+click/__pycache__/core.cpython-38.pyc,,
+click/__pycache__/decorators.cpython-38.pyc,,
+click/__pycache__/exceptions.cpython-38.pyc,,
+click/__pycache__/formatting.cpython-38.pyc,,
+click/__pycache__/globals.cpython-38.pyc,,
+click/__pycache__/parser.cpython-38.pyc,,
+click/__pycache__/termui.cpython-38.pyc,,
+click/__pycache__/testing.cpython-38.pyc,,
+click/__pycache__/types.cpython-38.pyc,,
+click/__pycache__/utils.cpython-38.pyc,,
+click/_bashcomplete.py,sha256=9J98IHQYmCAr2Jup6TDshUr5FJEen-AoQCZR0K5nKxQ,12309
+click/_compat.py,sha256=AoMaYnZ-3pwtNXuHtlb6_UXsayoG0QZiHKIRy2VFezc,24169
+click/_termui_impl.py,sha256=yNktUMAdjYOU1HMkq915jR3zgAzUNtGSQqSTSSMn3eQ,20702
+click/_textwrap.py,sha256=ajCzkzFly5tjm9foQ5N9_MOeaYJMBjAltuFa69n4iXY,1197
+click/_unicodefun.py,sha256=apLSNEBZgUsQNPMUv072zJ1swqnm0dYVT5TqcIWTt6w,4201
+click/_winconsole.py,sha256=6YDu6Rq1Wxx4w9uinBMK2LHvP83aerZM9GQurlk3QDo,10010
+click/core.py,sha256=V6DJzastGhrC6WTDwV9MSLwcJUdX2Uf1ypmgkjBdn_Y,77650
+click/decorators.py,sha256=3TvEO_BkaHl7k6Eh1G5eC7JK4LKPdpFqH9JP0QDyTlM,11215
+click/exceptions.py,sha256=3pQAyyMFzx5A3eV0Y27WtDTyGogZRbrC6_o5DjjKBbw,8118
+click/formatting.py,sha256=Wb4gqFEpWaKPgAbOvnkCl8p-bEZx5KpM5ZSByhlnJNk,9281
+click/globals.py,sha256=ht7u2kUGI08pAarB4e4yC8Lkkxy6gJfRZyzxEj8EbWQ,1501
+click/parser.py,sha256=mFK-k58JtPpqO0AC36WAr0t5UfzEw1mvgVSyn7WCe9M,15691
+click/termui.py,sha256=G7QBEKIepRIGLvNdGwBTYiEtSImRxvTO_AglVpyHH2s,23998
+click/testing.py,sha256=EUEsDUqNXFgCLhZ0ZFOROpaVDA5I_rijwnNPE6qICgA,12854
+click/types.py,sha256=wuubik4VqgqAw5dvbYFkDt-zSAx97y9TQXuXcVaRyQA,25045
+click/utils.py,sha256=4VEcJ7iEHwjnFuzEuRtkT99o5VG3zqSD7Q2CVzv13WU,15940
diff --git a/matteo_env/Lib/site-packages/click-7.1.2.dist-info/WHEEL b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/WHEEL
new file mode 100644
index 0000000..ef99c6c
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/matteo_env/Lib/site-packages/click-7.1.2.dist-info/top_level.txt b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/top_level.txt
new file mode 100644
index 0000000..dca9a90
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click-7.1.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+click
diff --git a/matteo_env/Lib/site-packages/click/__init__.py b/matteo_env/Lib/site-packages/click/__init__.py
new file mode 100644
index 0000000..2b6008f
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/__init__.py
@@ -0,0 +1,79 @@
+"""
+Click is a simple Python module inspired by the stdlib optparse to make
+writing command line scripts fun. Unlike other modules, it's based
+around a simple API that does not come with too much magic and is
+composable.
+"""
+from .core import Argument
+from .core import BaseCommand
+from .core import Command
+from .core import CommandCollection
+from .core import Context
+from .core import Group
+from .core import MultiCommand
+from .core import Option
+from .core import Parameter
+from .decorators import argument
+from .decorators import command
+from .decorators import confirmation_option
+from .decorators import group
+from .decorators import help_option
+from .decorators import make_pass_decorator
+from .decorators import option
+from .decorators import pass_context
+from .decorators import pass_obj
+from .decorators import password_option
+from .decorators import version_option
+from .exceptions import Abort
+from .exceptions import BadArgumentUsage
+from .exceptions import BadOptionUsage
+from .exceptions import BadParameter
+from .exceptions import ClickException
+from .exceptions import FileError
+from .exceptions import MissingParameter
+from .exceptions import NoSuchOption
+from .exceptions import UsageError
+from .formatting import HelpFormatter
+from .formatting import wrap_text
+from .globals import get_current_context
+from .parser import OptionParser
+from .termui import clear
+from .termui import confirm
+from .termui import echo_via_pager
+from .termui import edit
+from .termui import get_terminal_size
+from .termui import getchar
+from .termui import launch
+from .termui import pause
+from .termui import progressbar
+from .termui import prompt
+from .termui import secho
+from .termui import style
+from .termui import unstyle
+from .types import BOOL
+from .types import Choice
+from .types import DateTime
+from .types import File
+from .types import FLOAT
+from .types import FloatRange
+from .types import INT
+from .types import IntRange
+from .types import ParamType
+from .types import Path
+from .types import STRING
+from .types import Tuple
+from .types import UNPROCESSED
+from .types import UUID
+from .utils import echo
+from .utils import format_filename
+from .utils import get_app_dir
+from .utils import get_binary_stream
+from .utils import get_os_args
+from .utils import get_text_stream
+from .utils import open_file
+
+# Controls if click should emit the warning about the use of unicode
+# literals.
+disable_unicode_literals_warning = False
+
+__version__ = "7.1.2"
diff --git a/matteo_env/Lib/site-packages/click/_bashcomplete.py b/matteo_env/Lib/site-packages/click/_bashcomplete.py
new file mode 100644
index 0000000..8bca244
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/_bashcomplete.py
@@ -0,0 +1,375 @@
+import copy
+import os
+import re
+
+from .core import Argument
+from .core import MultiCommand
+from .core import Option
+from .parser import split_arg_string
+from .types import Choice
+from .utils import echo
+
+try:
+ from collections import abc
+except ImportError:
+ import collections as abc
+
+WORDBREAK = "="
+
+# Note, only BASH version 4.4 and later have the nosort option.
+COMPLETION_SCRIPT_BASH = """
+%(complete_func)s() {
+ local IFS=$'\n'
+ COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
+ COMP_CWORD=$COMP_CWORD \\
+ %(autocomplete_var)s=complete $1 ) )
+ return 0
+}
+
+%(complete_func)setup() {
+ local COMPLETION_OPTIONS=""
+ local BASH_VERSION_ARR=(${BASH_VERSION//./ })
+ # Only BASH version 4.4 and later have the nosort option.
+ if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] \
+&& [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then
+ COMPLETION_OPTIONS="-o nosort"
+ fi
+
+ complete $COMPLETION_OPTIONS -F %(complete_func)s %(script_names)s
+}
+
+%(complete_func)setup
+"""
+
+COMPLETION_SCRIPT_ZSH = """
+#compdef %(script_names)s
+
+%(complete_func)s() {
+ local -a completions
+ local -a completions_with_descriptions
+ local -a response
+ (( ! $+commands[%(script_names)s] )) && return 1
+
+ response=("${(@f)$( env COMP_WORDS=\"${words[*]}\" \\
+ COMP_CWORD=$((CURRENT-1)) \\
+ %(autocomplete_var)s=\"complete_zsh\" \\
+ %(script_names)s )}")
+
+ for key descr in ${(kv)response}; do
+ if [[ "$descr" == "_" ]]; then
+ completions+=("$key")
+ else
+ completions_with_descriptions+=("$key":"$descr")
+ fi
+ done
+
+ if [ -n "$completions_with_descriptions" ]; then
+ _describe -V unsorted completions_with_descriptions -U
+ fi
+
+ if [ -n "$completions" ]; then
+ compadd -U -V unsorted -a completions
+ fi
+ compstate[insert]="automenu"
+}
+
+compdef %(complete_func)s %(script_names)s
+"""
+
+COMPLETION_SCRIPT_FISH = (
+ "complete --no-files --command %(script_names)s --arguments"
+ ' "(env %(autocomplete_var)s=complete_fish'
+ " COMP_WORDS=(commandline -cp) COMP_CWORD=(commandline -t)"
+ ' %(script_names)s)"'
+)
+
+_completion_scripts = {
+ "bash": COMPLETION_SCRIPT_BASH,
+ "zsh": COMPLETION_SCRIPT_ZSH,
+ "fish": COMPLETION_SCRIPT_FISH,
+}
+
+_invalid_ident_char_re = re.compile(r"[^a-zA-Z0-9_]")
+
+
+def get_completion_script(prog_name, complete_var, shell):
+ cf_name = _invalid_ident_char_re.sub("", prog_name.replace("-", "_"))
+ script = _completion_scripts.get(shell, COMPLETION_SCRIPT_BASH)
+ return (
+ script
+ % {
+ "complete_func": "_{}_completion".format(cf_name),
+ "script_names": prog_name,
+ "autocomplete_var": complete_var,
+ }
+ ).strip() + ";"
+
+
+def resolve_ctx(cli, prog_name, args):
+ """Parse into a hierarchy of contexts. Contexts are connected
+ through the parent variable.
+
+ :param cli: command definition
+ :param prog_name: the program that is running
+ :param args: full list of args
+ :return: the final context/command parsed
+ """
+ ctx = cli.make_context(prog_name, args, resilient_parsing=True)
+ args = ctx.protected_args + ctx.args
+ while args:
+ if isinstance(ctx.command, MultiCommand):
+ if not ctx.command.chain:
+ cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
+ if cmd is None:
+ return ctx
+ ctx = cmd.make_context(
+ cmd_name, args, parent=ctx, resilient_parsing=True
+ )
+ args = ctx.protected_args + ctx.args
+ else:
+ # Walk chained subcommand contexts saving the last one.
+ while args:
+ cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
+ if cmd is None:
+ return ctx
+ sub_ctx = cmd.make_context(
+ cmd_name,
+ args,
+ parent=ctx,
+ allow_extra_args=True,
+ allow_interspersed_args=False,
+ resilient_parsing=True,
+ )
+ args = sub_ctx.args
+ ctx = sub_ctx
+ args = sub_ctx.protected_args + sub_ctx.args
+ else:
+ break
+ return ctx
+
+
+def start_of_option(param_str):
+ """
+ :param param_str: param_str to check
+ :return: whether or not this is the start of an option declaration
+ (i.e. starts "-" or "--")
+ """
+ return param_str and param_str[:1] == "-"
+
+
+def is_incomplete_option(all_args, cmd_param):
+ """
+ :param all_args: the full original list of args supplied
+ :param cmd_param: the current command paramter
+ :return: whether or not the last option declaration (i.e. starts
+ "-" or "--") is incomplete and corresponds to this cmd_param. In
+ other words whether this cmd_param option can still accept
+ values
+ """
+ if not isinstance(cmd_param, Option):
+ return False
+ if cmd_param.is_flag:
+ return False
+ last_option = None
+ for index, arg_str in enumerate(
+ reversed([arg for arg in all_args if arg != WORDBREAK])
+ ):
+ if index + 1 > cmd_param.nargs:
+ break
+ if start_of_option(arg_str):
+ last_option = arg_str
+
+ return True if last_option and last_option in cmd_param.opts else False
+
+
+def is_incomplete_argument(current_params, cmd_param):
+ """
+ :param current_params: the current params and values for this
+ argument as already entered
+ :param cmd_param: the current command parameter
+ :return: whether or not the last argument is incomplete and
+ corresponds to this cmd_param. In other words whether or not the
+ this cmd_param argument can still accept values
+ """
+ if not isinstance(cmd_param, Argument):
+ return False
+ current_param_values = current_params[cmd_param.name]
+ if current_param_values is None:
+ return True
+ if cmd_param.nargs == -1:
+ return True
+ if (
+ isinstance(current_param_values, abc.Iterable)
+ and cmd_param.nargs > 1
+ and len(current_param_values) < cmd_param.nargs
+ ):
+ return True
+ return False
+
+
+def get_user_autocompletions(ctx, args, incomplete, cmd_param):
+ """
+ :param ctx: context associated with the parsed command
+ :param args: full list of args
+ :param incomplete: the incomplete text to autocomplete
+ :param cmd_param: command definition
+ :return: all the possible user-specified completions for the param
+ """
+ results = []
+ if isinstance(cmd_param.type, Choice):
+ # Choices don't support descriptions.
+ results = [
+ (c, None) for c in cmd_param.type.choices if str(c).startswith(incomplete)
+ ]
+ elif cmd_param.autocompletion is not None:
+ dynamic_completions = cmd_param.autocompletion(
+ ctx=ctx, args=args, incomplete=incomplete
+ )
+ results = [
+ c if isinstance(c, tuple) else (c, None) for c in dynamic_completions
+ ]
+ return results
+
+
+def get_visible_commands_starting_with(ctx, starts_with):
+ """
+ :param ctx: context associated with the parsed command
+ :starts_with: string that visible commands must start with.
+ :return: all visible (not hidden) commands that start with starts_with.
+ """
+ for c in ctx.command.list_commands(ctx):
+ if c.startswith(starts_with):
+ command = ctx.command.get_command(ctx, c)
+ if not command.hidden:
+ yield command
+
+
+def add_subcommand_completions(ctx, incomplete, completions_out):
+ # Add subcommand completions.
+ if isinstance(ctx.command, MultiCommand):
+ completions_out.extend(
+ [
+ (c.name, c.get_short_help_str())
+ for c in get_visible_commands_starting_with(ctx, incomplete)
+ ]
+ )
+
+ # Walk up the context list and add any other completion
+ # possibilities from chained commands
+ while ctx.parent is not None:
+ ctx = ctx.parent
+ if isinstance(ctx.command, MultiCommand) and ctx.command.chain:
+ remaining_commands = [
+ c
+ for c in get_visible_commands_starting_with(ctx, incomplete)
+ if c.name not in ctx.protected_args
+ ]
+ completions_out.extend(
+ [(c.name, c.get_short_help_str()) for c in remaining_commands]
+ )
+
+
+def get_choices(cli, prog_name, args, incomplete):
+ """
+ :param cli: command definition
+ :param prog_name: the program that is running
+ :param args: full list of args
+ :param incomplete: the incomplete text to autocomplete
+ :return: all the possible completions for the incomplete
+ """
+ all_args = copy.deepcopy(args)
+
+ ctx = resolve_ctx(cli, prog_name, args)
+ if ctx is None:
+ return []
+
+ has_double_dash = "--" in all_args
+
+ # In newer versions of bash long opts with '='s are partitioned, but
+ # it's easier to parse without the '='
+ if start_of_option(incomplete) and WORDBREAK in incomplete:
+ partition_incomplete = incomplete.partition(WORDBREAK)
+ all_args.append(partition_incomplete[0])
+ incomplete = partition_incomplete[2]
+ elif incomplete == WORDBREAK:
+ incomplete = ""
+
+ completions = []
+ if not has_double_dash and start_of_option(incomplete):
+ # completions for partial options
+ for param in ctx.command.params:
+ if isinstance(param, Option) and not param.hidden:
+ param_opts = [
+ param_opt
+ for param_opt in param.opts + param.secondary_opts
+ if param_opt not in all_args or param.multiple
+ ]
+ completions.extend(
+ [(o, param.help) for o in param_opts if o.startswith(incomplete)]
+ )
+ return completions
+ # completion for option values from user supplied values
+ for param in ctx.command.params:
+ if is_incomplete_option(all_args, param):
+ return get_user_autocompletions(ctx, all_args, incomplete, param)
+ # completion for argument values from user supplied values
+ for param in ctx.command.params:
+ if is_incomplete_argument(ctx.params, param):
+ return get_user_autocompletions(ctx, all_args, incomplete, param)
+
+ add_subcommand_completions(ctx, incomplete, completions)
+ # Sort before returning so that proper ordering can be enforced in custom types.
+ return sorted(completions)
+
+
+def do_complete(cli, prog_name, include_descriptions):
+ cwords = split_arg_string(os.environ["COMP_WORDS"])
+ cword = int(os.environ["COMP_CWORD"])
+ args = cwords[1:cword]
+ try:
+ incomplete = cwords[cword]
+ except IndexError:
+ incomplete = ""
+
+ for item in get_choices(cli, prog_name, args, incomplete):
+ echo(item[0])
+ if include_descriptions:
+ # ZSH has trouble dealing with empty array parameters when
+ # returned from commands, use '_' to indicate no description
+ # is present.
+ echo(item[1] if item[1] else "_")
+
+ return True
+
+
+def do_complete_fish(cli, prog_name):
+ cwords = split_arg_string(os.environ["COMP_WORDS"])
+ incomplete = os.environ["COMP_CWORD"]
+ args = cwords[1:]
+
+ for item in get_choices(cli, prog_name, args, incomplete):
+ if item[1]:
+ echo("{arg}\t{desc}".format(arg=item[0], desc=item[1]))
+ else:
+ echo(item[0])
+
+ return True
+
+
+def bashcomplete(cli, prog_name, complete_var, complete_instr):
+ if "_" in complete_instr:
+ command, shell = complete_instr.split("_", 1)
+ else:
+ command = complete_instr
+ shell = "bash"
+
+ if command == "source":
+ echo(get_completion_script(prog_name, complete_var, shell))
+ return True
+ elif command == "complete":
+ if shell == "fish":
+ return do_complete_fish(cli, prog_name)
+ elif shell in {"bash", "zsh"}:
+ return do_complete(cli, prog_name, shell == "zsh")
+
+ return False
diff --git a/matteo_env/Lib/site-packages/click/_compat.py b/matteo_env/Lib/site-packages/click/_compat.py
new file mode 100644
index 0000000..60cb115
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/_compat.py
@@ -0,0 +1,786 @@
+# flake8: noqa
+import codecs
+import io
+import os
+import re
+import sys
+from weakref import WeakKeyDictionary
+
+PY2 = sys.version_info[0] == 2
+CYGWIN = sys.platform.startswith("cygwin")
+MSYS2 = sys.platform.startswith("win") and ("GCC" in sys.version)
+# Determine local App Engine environment, per Google's own suggestion
+APP_ENGINE = "APPENGINE_RUNTIME" in os.environ and "Development/" in os.environ.get(
+ "SERVER_SOFTWARE", ""
+)
+WIN = sys.platform.startswith("win") and not APP_ENGINE and not MSYS2
+DEFAULT_COLUMNS = 80
+
+
+_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]")
+
+
+def get_filesystem_encoding():
+ return sys.getfilesystemencoding() or sys.getdefaultencoding()
+
+
+def _make_text_stream(
+ stream, encoding, errors, force_readable=False, force_writable=False
+):
+ if encoding is None:
+ encoding = get_best_encoding(stream)
+ if errors is None:
+ errors = "replace"
+ return _NonClosingTextIOWrapper(
+ stream,
+ encoding,
+ errors,
+ line_buffering=True,
+ force_readable=force_readable,
+ force_writable=force_writable,
+ )
+
+
+def is_ascii_encoding(encoding):
+ """Checks if a given encoding is ascii."""
+ try:
+ return codecs.lookup(encoding).name == "ascii"
+ except LookupError:
+ return False
+
+
+def get_best_encoding(stream):
+ """Returns the default stream encoding if not found."""
+ rv = getattr(stream, "encoding", None) or sys.getdefaultencoding()
+ if is_ascii_encoding(rv):
+ return "utf-8"
+ return rv
+
+
+class _NonClosingTextIOWrapper(io.TextIOWrapper):
+ def __init__(
+ self,
+ stream,
+ encoding,
+ errors,
+ force_readable=False,
+ force_writable=False,
+ **extra
+ ):
+ self._stream = stream = _FixupStream(stream, force_readable, force_writable)
+ io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra)
+
+ # The io module is a place where the Python 3 text behavior
+ # was forced upon Python 2, so we need to unbreak
+ # it to look like Python 2.
+ if PY2:
+
+ def write(self, x):
+ if isinstance(x, str) or is_bytes(x):
+ try:
+ self.flush()
+ except Exception:
+ pass
+ return self.buffer.write(str(x))
+ return io.TextIOWrapper.write(self, x)
+
+ def writelines(self, lines):
+ for line in lines:
+ self.write(line)
+
+ def __del__(self):
+ try:
+ self.detach()
+ except Exception:
+ pass
+
+ def isatty(self):
+ # https://bitbucket.org/pypy/pypy/issue/1803
+ return self._stream.isatty()
+
+
+class _FixupStream(object):
+ """The new io interface needs more from streams than streams
+ traditionally implement. As such, this fix-up code is necessary in
+ some circumstances.
+
+ The forcing of readable and writable flags are there because some tools
+ put badly patched objects on sys (one such offender are certain version
+ of jupyter notebook).
+ """
+
+ def __init__(self, stream, force_readable=False, force_writable=False):
+ self._stream = stream
+ self._force_readable = force_readable
+ self._force_writable = force_writable
+
+ def __getattr__(self, name):
+ return getattr(self._stream, name)
+
+ def read1(self, size):
+ f = getattr(self._stream, "read1", None)
+ if f is not None:
+ return f(size)
+ # We only dispatch to readline instead of read in Python 2 as we
+ # do not want cause problems with the different implementation
+ # of line buffering.
+ if PY2:
+ return self._stream.readline(size)
+ return self._stream.read(size)
+
+ def readable(self):
+ if self._force_readable:
+ return True
+ x = getattr(self._stream, "readable", None)
+ if x is not None:
+ return x()
+ try:
+ self._stream.read(0)
+ except Exception:
+ return False
+ return True
+
+ def writable(self):
+ if self._force_writable:
+ return True
+ x = getattr(self._stream, "writable", None)
+ if x is not None:
+ return x()
+ try:
+ self._stream.write("")
+ except Exception:
+ try:
+ self._stream.write(b"")
+ except Exception:
+ return False
+ return True
+
+ def seekable(self):
+ x = getattr(self._stream, "seekable", None)
+ if x is not None:
+ return x()
+ try:
+ self._stream.seek(self._stream.tell())
+ except Exception:
+ return False
+ return True
+
+
+if PY2:
+ text_type = unicode
+ raw_input = raw_input
+ string_types = (str, unicode)
+ int_types = (int, long)
+ iteritems = lambda x: x.iteritems()
+ range_type = xrange
+
+ def is_bytes(x):
+ return isinstance(x, (buffer, bytearray))
+
+ _identifier_re = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$")
+
+ # For Windows, we need to force stdout/stdin/stderr to binary if it's
+ # fetched for that. This obviously is not the most correct way to do
+ # it as it changes global state. Unfortunately, there does not seem to
+ # be a clear better way to do it as just reopening the file in binary
+ # mode does not change anything.
+ #
+ # An option would be to do what Python 3 does and to open the file as
+ # binary only, patch it back to the system, and then use a wrapper
+ # stream that converts newlines. It's not quite clear what's the
+ # correct option here.
+ #
+ # This code also lives in _winconsole for the fallback to the console
+ # emulation stream.
+ #
+ # There are also Windows environments where the `msvcrt` module is not
+ # available (which is why we use try-catch instead of the WIN variable
+ # here), such as the Google App Engine development server on Windows. In
+ # those cases there is just nothing we can do.
+ def set_binary_mode(f):
+ return f
+
+ try:
+ import msvcrt
+ except ImportError:
+ pass
+ else:
+
+ def set_binary_mode(f):
+ try:
+ fileno = f.fileno()
+ except Exception:
+ pass
+ else:
+ msvcrt.setmode(fileno, os.O_BINARY)
+ return f
+
+ try:
+ import fcntl
+ except ImportError:
+ pass
+ else:
+
+ def set_binary_mode(f):
+ try:
+ fileno = f.fileno()
+ except Exception:
+ pass
+ else:
+ flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
+ fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK)
+ return f
+
+ def isidentifier(x):
+ return _identifier_re.search(x) is not None
+
+ def get_binary_stdin():
+ return set_binary_mode(sys.stdin)
+
+ def get_binary_stdout():
+ _wrap_std_stream("stdout")
+ return set_binary_mode(sys.stdout)
+
+ def get_binary_stderr():
+ _wrap_std_stream("stderr")
+ return set_binary_mode(sys.stderr)
+
+ def get_text_stdin(encoding=None, errors=None):
+ rv = _get_windows_console_stream(sys.stdin, encoding, errors)
+ if rv is not None:
+ return rv
+ return _make_text_stream(sys.stdin, encoding, errors, force_readable=True)
+
+ def get_text_stdout(encoding=None, errors=None):
+ _wrap_std_stream("stdout")
+ rv = _get_windows_console_stream(sys.stdout, encoding, errors)
+ if rv is not None:
+ return rv
+ return _make_text_stream(sys.stdout, encoding, errors, force_writable=True)
+
+ def get_text_stderr(encoding=None, errors=None):
+ _wrap_std_stream("stderr")
+ rv = _get_windows_console_stream(sys.stderr, encoding, errors)
+ if rv is not None:
+ return rv
+ return _make_text_stream(sys.stderr, encoding, errors, force_writable=True)
+
+ def filename_to_ui(value):
+ if isinstance(value, bytes):
+ value = value.decode(get_filesystem_encoding(), "replace")
+ return value
+
+
+else:
+ import io
+
+ text_type = str
+ raw_input = input
+ string_types = (str,)
+ int_types = (int,)
+ range_type = range
+ isidentifier = lambda x: x.isidentifier()
+ iteritems = lambda x: iter(x.items())
+
+ def is_bytes(x):
+ return isinstance(x, (bytes, memoryview, bytearray))
+
+ def _is_binary_reader(stream, default=False):
+ try:
+ return isinstance(stream.read(0), bytes)
+ except Exception:
+ return default
+ # This happens in some cases where the stream was already
+ # closed. In this case, we assume the default.
+
+ def _is_binary_writer(stream, default=False):
+ try:
+ stream.write(b"")
+ except Exception:
+ try:
+ stream.write("")
+ return False
+ except Exception:
+ pass
+ return default
+ return True
+
+ def _find_binary_reader(stream):
+ # We need to figure out if the given stream is already binary.
+ # This can happen because the official docs recommend detaching
+ # the streams to get binary streams. Some code might do this, so
+ # we need to deal with this case explicitly.
+ if _is_binary_reader(stream, False):
+ return stream
+
+ buf = getattr(stream, "buffer", None)
+
+ # Same situation here; this time we assume that the buffer is
+ # actually binary in case it's closed.
+ if buf is not None and _is_binary_reader(buf, True):
+ return buf
+
+ def _find_binary_writer(stream):
+ # We need to figure out if the given stream is already binary.
+ # This can happen because the official docs recommend detatching
+ # the streams to get binary streams. Some code might do this, so
+ # we need to deal with this case explicitly.
+ if _is_binary_writer(stream, False):
+ return stream
+
+ buf = getattr(stream, "buffer", None)
+
+ # Same situation here; this time we assume that the buffer is
+ # actually binary in case it's closed.
+ if buf is not None and _is_binary_writer(buf, True):
+ return buf
+
+ def _stream_is_misconfigured(stream):
+ """A stream is misconfigured if its encoding is ASCII."""
+ # If the stream does not have an encoding set, we assume it's set
+ # to ASCII. This appears to happen in certain unittest
+ # environments. It's not quite clear what the correct behavior is
+ # but this at least will force Click to recover somehow.
+ return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii")
+
+ def _is_compat_stream_attr(stream, attr, value):
+ """A stream attribute is compatible if it is equal to the
+ desired value or the desired value is unset and the attribute
+ has a value.
+ """
+ stream_value = getattr(stream, attr, None)
+ return stream_value == value or (value is None and stream_value is not None)
+
+ def _is_compatible_text_stream(stream, encoding, errors):
+ """Check if a stream's encoding and errors attributes are
+ compatible with the desired values.
+ """
+ return _is_compat_stream_attr(
+ stream, "encoding", encoding
+ ) and _is_compat_stream_attr(stream, "errors", errors)
+
+ def _force_correct_text_stream(
+ text_stream,
+ encoding,
+ errors,
+ is_binary,
+ find_binary,
+ force_readable=False,
+ force_writable=False,
+ ):
+ if is_binary(text_stream, False):
+ binary_reader = text_stream
+ else:
+ # If the stream looks compatible, and won't default to a
+ # misconfigured ascii encoding, return it as-is.
+ if _is_compatible_text_stream(text_stream, encoding, errors) and not (
+ encoding is None and _stream_is_misconfigured(text_stream)
+ ):
+ return text_stream
+
+ # Otherwise, get the underlying binary reader.
+ binary_reader = find_binary(text_stream)
+
+ # If that's not possible, silently use the original reader
+ # and get mojibake instead of exceptions.
+ if binary_reader is None:
+ return text_stream
+
+ # Default errors to replace instead of strict in order to get
+ # something that works.
+ if errors is None:
+ errors = "replace"
+
+ # Wrap the binary stream in a text stream with the correct
+ # encoding parameters.
+ return _make_text_stream(
+ binary_reader,
+ encoding,
+ errors,
+ force_readable=force_readable,
+ force_writable=force_writable,
+ )
+
+ def _force_correct_text_reader(text_reader, encoding, errors, force_readable=False):
+ return _force_correct_text_stream(
+ text_reader,
+ encoding,
+ errors,
+ _is_binary_reader,
+ _find_binary_reader,
+ force_readable=force_readable,
+ )
+
+ def _force_correct_text_writer(text_writer, encoding, errors, force_writable=False):
+ return _force_correct_text_stream(
+ text_writer,
+ encoding,
+ errors,
+ _is_binary_writer,
+ _find_binary_writer,
+ force_writable=force_writable,
+ )
+
+ def get_binary_stdin():
+ reader = _find_binary_reader(sys.stdin)
+ if reader is None:
+ raise RuntimeError("Was not able to determine binary stream for sys.stdin.")
+ return reader
+
+ def get_binary_stdout():
+ writer = _find_binary_writer(sys.stdout)
+ if writer is None:
+ raise RuntimeError(
+ "Was not able to determine binary stream for sys.stdout."
+ )
+ return writer
+
+ def get_binary_stderr():
+ writer = _find_binary_writer(sys.stderr)
+ if writer is None:
+ raise RuntimeError(
+ "Was not able to determine binary stream for sys.stderr."
+ )
+ return writer
+
+ def get_text_stdin(encoding=None, errors=None):
+ rv = _get_windows_console_stream(sys.stdin, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_reader(
+ sys.stdin, encoding, errors, force_readable=True
+ )
+
+ def get_text_stdout(encoding=None, errors=None):
+ rv = _get_windows_console_stream(sys.stdout, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_writer(
+ sys.stdout, encoding, errors, force_writable=True
+ )
+
+ def get_text_stderr(encoding=None, errors=None):
+ rv = _get_windows_console_stream(sys.stderr, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_writer(
+ sys.stderr, encoding, errors, force_writable=True
+ )
+
+ def filename_to_ui(value):
+ if isinstance(value, bytes):
+ value = value.decode(get_filesystem_encoding(), "replace")
+ else:
+ value = value.encode("utf-8", "surrogateescape").decode("utf-8", "replace")
+ return value
+
+
+def get_streerror(e, default=None):
+ if hasattr(e, "strerror"):
+ msg = e.strerror
+ else:
+ if default is not None:
+ msg = default
+ else:
+ msg = str(e)
+ if isinstance(msg, bytes):
+ msg = msg.decode("utf-8", "replace")
+ return msg
+
+
+def _wrap_io_open(file, mode, encoding, errors):
+ """On Python 2, :func:`io.open` returns a text file wrapper that
+ requires passing ``unicode`` to ``write``. Need to open the file in
+ binary mode then wrap it in a subclass that can write ``str`` and
+ ``unicode``.
+
+ Also handles not passing ``encoding`` and ``errors`` in binary mode.
+ """
+ binary = "b" in mode
+
+ if binary:
+ kwargs = {}
+ else:
+ kwargs = {"encoding": encoding, "errors": errors}
+
+ if not PY2 or binary:
+ return io.open(file, mode, **kwargs)
+
+ f = io.open(file, "{}b".format(mode.replace("t", "")))
+ return _make_text_stream(f, **kwargs)
+
+
+def open_stream(filename, mode="r", encoding=None, errors="strict", atomic=False):
+ binary = "b" in mode
+
+ # Standard streams first. These are simple because they don't need
+ # special handling for the atomic flag. It's entirely ignored.
+ if filename == "-":
+ if any(m in mode for m in ["w", "a", "x"]):
+ if binary:
+ return get_binary_stdout(), False
+ return get_text_stdout(encoding=encoding, errors=errors), False
+ if binary:
+ return get_binary_stdin(), False
+ return get_text_stdin(encoding=encoding, errors=errors), False
+
+ # Non-atomic writes directly go out through the regular open functions.
+ if not atomic:
+ return _wrap_io_open(filename, mode, encoding, errors), True
+
+ # Some usability stuff for atomic writes
+ if "a" in mode:
+ raise ValueError(
+ "Appending to an existing file is not supported, because that"
+ " would involve an expensive `copy`-operation to a temporary"
+ " file. Open the file in normal `w`-mode and copy explicitly"
+ " if that's what you're after."
+ )
+ if "x" in mode:
+ raise ValueError("Use the `overwrite`-parameter instead.")
+ if "w" not in mode:
+ raise ValueError("Atomic writes only make sense with `w`-mode.")
+
+ # Atomic writes are more complicated. They work by opening a file
+ # as a proxy in the same folder and then using the fdopen
+ # functionality to wrap it in a Python file. Then we wrap it in an
+ # atomic file that moves the file over on close.
+ import errno
+ import random
+
+ try:
+ perm = os.stat(filename).st_mode
+ except OSError:
+ perm = None
+
+ flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
+
+ if binary:
+ flags |= getattr(os, "O_BINARY", 0)
+
+ while True:
+ tmp_filename = os.path.join(
+ os.path.dirname(filename),
+ ".__atomic-write{:08x}".format(random.randrange(1 << 32)),
+ )
+ try:
+ fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm)
+ break
+ except OSError as e:
+ if e.errno == errno.EEXIST or (
+ os.name == "nt"
+ and e.errno == errno.EACCES
+ and os.path.isdir(e.filename)
+ and os.access(e.filename, os.W_OK)
+ ):
+ continue
+ raise
+
+ if perm is not None:
+ os.chmod(tmp_filename, perm) # in case perm includes bits in umask
+
+ f = _wrap_io_open(fd, mode, encoding, errors)
+ return _AtomicFile(f, tmp_filename, os.path.realpath(filename)), True
+
+
+# Used in a destructor call, needs extra protection from interpreter cleanup.
+if hasattr(os, "replace"):
+ _replace = os.replace
+ _can_replace = True
+else:
+ _replace = os.rename
+ _can_replace = not WIN
+
+
+class _AtomicFile(object):
+ def __init__(self, f, tmp_filename, real_filename):
+ self._f = f
+ self._tmp_filename = tmp_filename
+ self._real_filename = real_filename
+ self.closed = False
+
+ @property
+ def name(self):
+ return self._real_filename
+
+ def close(self, delete=False):
+ if self.closed:
+ return
+ self._f.close()
+ if not _can_replace:
+ try:
+ os.remove(self._real_filename)
+ except OSError:
+ pass
+ _replace(self._tmp_filename, self._real_filename)
+ self.closed = True
+
+ def __getattr__(self, name):
+ return getattr(self._f, name)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ self.close(delete=exc_type is not None)
+
+ def __repr__(self):
+ return repr(self._f)
+
+
+auto_wrap_for_ansi = None
+colorama = None
+get_winterm_size = None
+
+
+def strip_ansi(value):
+ return _ansi_re.sub("", value)
+
+
+def _is_jupyter_kernel_output(stream):
+ if WIN:
+        # TODO: Couldn't test on Windows, shouldn't try to support until
+ # someone tests the details wrt colorama.
+ return
+
+ while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)):
+ stream = stream._stream
+
+ return stream.__class__.__module__.startswith("ipykernel.")
+
+
+def should_strip_ansi(stream=None, color=None):
+ if color is None:
+ if stream is None:
+ stream = sys.stdin
+ return not isatty(stream) and not _is_jupyter_kernel_output(stream)
+ return not color
+
+
+# If we're on Windows, we provide transparent integration through
+# colorama. This will make ANSI colors through the echo function
+# work automatically.
+if WIN:
+ # Windows has a smaller terminal
+ DEFAULT_COLUMNS = 79
+
+ from ._winconsole import _get_windows_console_stream, _wrap_std_stream
+
+ def _get_argv_encoding():
+ import locale
+
+ return locale.getpreferredencoding()
+
+ if PY2:
+
+ def raw_input(prompt=""):
+ sys.stderr.flush()
+ if prompt:
+ stdout = _default_text_stdout()
+ stdout.write(prompt)
+ stdin = _default_text_stdin()
+ return stdin.readline().rstrip("\r\n")
+
+ try:
+ import colorama
+ except ImportError:
+ pass
+ else:
+ _ansi_stream_wrappers = WeakKeyDictionary()
+
+ def auto_wrap_for_ansi(stream, color=None):
+ """This function wraps a stream so that calls through colorama
+ are issued to the win32 console API to recolor on demand. It
+ also ensures to reset the colors if a write call is interrupted
+ to not destroy the console afterwards.
+ """
+ try:
+ cached = _ansi_stream_wrappers.get(stream)
+ except Exception:
+ cached = None
+ if cached is not None:
+ return cached
+ strip = should_strip_ansi(stream, color)
+ ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
+ rv = ansi_wrapper.stream
+ _write = rv.write
+
+ def _safe_write(s):
+ try:
+ return _write(s)
+ except:
+ ansi_wrapper.reset_all()
+ raise
+
+ rv.write = _safe_write
+ try:
+ _ansi_stream_wrappers[stream] = rv
+ except Exception:
+ pass
+ return rv
+
+ def get_winterm_size():
+ win = colorama.win32.GetConsoleScreenBufferInfo(
+ colorama.win32.STDOUT
+ ).srWindow
+ return win.Right - win.Left, win.Bottom - win.Top
+
+
+else:
+
+ def _get_argv_encoding():
+ return getattr(sys.stdin, "encoding", None) or get_filesystem_encoding()
+
+ _get_windows_console_stream = lambda *x: None
+ _wrap_std_stream = lambda *x: None
+
+
+def term_len(x):
+ return len(strip_ansi(x))
+
+
+def isatty(stream):
+ try:
+ return stream.isatty()
+ except Exception:
+ return False
+
+
+def _make_cached_stream_func(src_func, wrapper_func):
+ cache = WeakKeyDictionary()
+
+ def func():
+ stream = src_func()
+ try:
+ rv = cache.get(stream)
+ except Exception:
+ rv = None
+ if rv is not None:
+ return rv
+ rv = wrapper_func()
+ try:
+ stream = src_func() # In case wrapper_func() modified the stream
+ cache[stream] = rv
+ except Exception:
+ pass
+ return rv
+
+ return func
+
+
+_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin)
+_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout)
+_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr)
+
+
+binary_streams = {
+ "stdin": get_binary_stdin,
+ "stdout": get_binary_stdout,
+ "stderr": get_binary_stderr,
+}
+
+text_streams = {
+ "stdin": get_text_stdin,
+ "stdout": get_text_stdout,
+ "stderr": get_text_stderr,
+}
diff --git a/matteo_env/Lib/site-packages/click/_termui_impl.py b/matteo_env/Lib/site-packages/click/_termui_impl.py
new file mode 100644
index 0000000..88bec37
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/_termui_impl.py
@@ -0,0 +1,657 @@
+# -*- coding: utf-8 -*-
+"""
+This module contains implementations for the termui module. To keep the
+import time of Click down, some infrequently used functionality is
+placed in this module and only imported as needed.
+"""
+import contextlib
+import math
+import os
+import sys
+import time
+
+from ._compat import _default_text_stdout
+from ._compat import CYGWIN
+from ._compat import get_best_encoding
+from ._compat import int_types
+from ._compat import isatty
+from ._compat import open_stream
+from ._compat import range_type
+from ._compat import strip_ansi
+from ._compat import term_len
+from ._compat import WIN
+from .exceptions import ClickException
+from .utils import echo
+
+if os.name == "nt":
+ BEFORE_BAR = "\r"
+ AFTER_BAR = "\n"
+else:
+ BEFORE_BAR = "\r\033[?25l"
+ AFTER_BAR = "\033[?25h\n"
+
+
+def _length_hint(obj):
+ """Returns the length hint of an object."""
+ try:
+ return len(obj)
+ except (AttributeError, TypeError):
+ try:
+ get_hint = type(obj).__length_hint__
+ except AttributeError:
+ return None
+ try:
+ hint = get_hint(obj)
+ except TypeError:
+ return None
+ if hint is NotImplemented or not isinstance(hint, int_types) or hint < 0:
+ return None
+ return hint
+
+
+class ProgressBar(object):
+ def __init__(
+ self,
+ iterable,
+ length=None,
+ fill_char="#",
+ empty_char=" ",
+ bar_template="%(bar)s",
+ info_sep=" ",
+ show_eta=True,
+ show_percent=None,
+ show_pos=False,
+ item_show_func=None,
+ label=None,
+ file=None,
+ color=None,
+ width=30,
+ ):
+ self.fill_char = fill_char
+ self.empty_char = empty_char
+ self.bar_template = bar_template
+ self.info_sep = info_sep
+ self.show_eta = show_eta
+ self.show_percent = show_percent
+ self.show_pos = show_pos
+ self.item_show_func = item_show_func
+ self.label = label or ""
+ if file is None:
+ file = _default_text_stdout()
+ self.file = file
+ self.color = color
+ self.width = width
+ self.autowidth = width == 0
+
+ if length is None:
+ length = _length_hint(iterable)
+ if iterable is None:
+ if length is None:
+ raise TypeError("iterable or length is required")
+ iterable = range_type(length)
+ self.iter = iter(iterable)
+ self.length = length
+ self.length_known = length is not None
+ self.pos = 0
+ self.avg = []
+ self.start = self.last_eta = time.time()
+ self.eta_known = False
+ self.finished = False
+ self.max_width = None
+ self.entered = False
+ self.current_item = None
+ self.is_hidden = not isatty(self.file)
+ self._last_line = None
+ self.short_limit = 0.5
+
+ def __enter__(self):
+ self.entered = True
+ self.render_progress()
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ self.render_finish()
+
+ def __iter__(self):
+ if not self.entered:
+ raise RuntimeError("You need to use progress bars in a with block.")
+ self.render_progress()
+ return self.generator()
+
+ def __next__(self):
+ # Iteration is defined in terms of a generator function,
+ # returned by iter(self); use that to define next(). This works
+ # because `self.iter` is an iterable consumed by that generator,
+ # so it is re-entry safe. Calling `next(self.generator())`
+ # twice works and does "what you want".
+ return next(iter(self))
+
+ # Python 2 compat
+ next = __next__
+
+ def is_fast(self):
+ return time.time() - self.start <= self.short_limit
+
+ def render_finish(self):
+ if self.is_hidden or self.is_fast():
+ return
+ self.file.write(AFTER_BAR)
+ self.file.flush()
+
+ @property
+ def pct(self):
+ if self.finished:
+ return 1.0
+ return min(self.pos / (float(self.length) or 1), 1.0)
+
+ @property
+ def time_per_iteration(self):
+ if not self.avg:
+ return 0.0
+ return sum(self.avg) / float(len(self.avg))
+
+ @property
+ def eta(self):
+ if self.length_known and not self.finished:
+ return self.time_per_iteration * (self.length - self.pos)
+ return 0.0
+
+ def format_eta(self):
+ if self.eta_known:
+ t = int(self.eta)
+ seconds = t % 60
+ t //= 60
+ minutes = t % 60
+ t //= 60
+ hours = t % 24
+ t //= 24
+ if t > 0:
+ return "{}d {:02}:{:02}:{:02}".format(t, hours, minutes, seconds)
+ else:
+ return "{:02}:{:02}:{:02}".format(hours, minutes, seconds)
+ return ""
+
+ def format_pos(self):
+ pos = str(self.pos)
+ if self.length_known:
+ pos += "/{}".format(self.length)
+ return pos
+
+ def format_pct(self):
+ return "{: 4}%".format(int(self.pct * 100))[1:]
+
+ def format_bar(self):
+ if self.length_known:
+ bar_length = int(self.pct * self.width)
+ bar = self.fill_char * bar_length
+ bar += self.empty_char * (self.width - bar_length)
+ elif self.finished:
+ bar = self.fill_char * self.width
+ else:
+ bar = list(self.empty_char * (self.width or 1))
+ if self.time_per_iteration != 0:
+ bar[
+ int(
+ (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5)
+ * self.width
+ )
+ ] = self.fill_char
+ bar = "".join(bar)
+ return bar
+
+ def format_progress_line(self):
+ show_percent = self.show_percent
+
+ info_bits = []
+ if self.length_known and show_percent is None:
+ show_percent = not self.show_pos
+
+ if self.show_pos:
+ info_bits.append(self.format_pos())
+ if show_percent:
+ info_bits.append(self.format_pct())
+ if self.show_eta and self.eta_known and not self.finished:
+ info_bits.append(self.format_eta())
+ if self.item_show_func is not None:
+ item_info = self.item_show_func(self.current_item)
+ if item_info is not None:
+ info_bits.append(item_info)
+
+ return (
+ self.bar_template
+ % {
+ "label": self.label,
+ "bar": self.format_bar(),
+ "info": self.info_sep.join(info_bits),
+ }
+ ).rstrip()
+
+ def render_progress(self):
+ from .termui import get_terminal_size
+
+ if self.is_hidden:
+ return
+
+ buf = []
+ # Update width in case the terminal has been resized
+ if self.autowidth:
+ old_width = self.width
+ self.width = 0
+ clutter_length = term_len(self.format_progress_line())
+ new_width = max(0, get_terminal_size()[0] - clutter_length)
+ if new_width < old_width:
+ buf.append(BEFORE_BAR)
+ buf.append(" " * self.max_width)
+ self.max_width = new_width
+ self.width = new_width
+
+ clear_width = self.width
+ if self.max_width is not None:
+ clear_width = self.max_width
+
+ buf.append(BEFORE_BAR)
+ line = self.format_progress_line()
+ line_len = term_len(line)
+ if self.max_width is None or self.max_width < line_len:
+ self.max_width = line_len
+
+ buf.append(line)
+ buf.append(" " * (clear_width - line_len))
+ line = "".join(buf)
+ # Render the line only if it changed.
+
+ if line != self._last_line and not self.is_fast():
+ self._last_line = line
+ echo(line, file=self.file, color=self.color, nl=False)
+ self.file.flush()
+
+ def make_step(self, n_steps):
+ self.pos += n_steps
+ if self.length_known and self.pos >= self.length:
+ self.finished = True
+
+ if (time.time() - self.last_eta) < 1.0:
+ return
+
+ self.last_eta = time.time()
+
+ # self.avg is a rolling list of length <= 7 of steps where steps are
+ # defined as time elapsed divided by the total progress through
+ # self.length.
+ if self.pos:
+ step = (time.time() - self.start) / self.pos
+ else:
+ step = time.time() - self.start
+
+ self.avg = self.avg[-6:] + [step]
+
+ self.eta_known = self.length_known
+
+ def update(self, n_steps):
+ self.make_step(n_steps)
+ self.render_progress()
+
+ def finish(self):
+ self.eta_known = 0
+ self.current_item = None
+ self.finished = True
+
+ def generator(self):
+ """Return a generator which yields the items added to the bar
+ during construction, and updates the progress bar *after* the
+ yielded block returns.
+ """
+ # WARNING: the iterator interface for `ProgressBar` relies on
+ # this and only works because this is a simple generator which
+ # doesn't create or manage additional state. If this function
+ # changes, the impact should be evaluated both against
+ # `iter(bar)` and `next(bar)`. `next()` in particular may call
+ # `self.generator()` repeatedly, and this must remain safe in
+ # order for that interface to work.
+ if not self.entered:
+ raise RuntimeError("You need to use progress bars in a with block.")
+
+ if self.is_hidden:
+ for rv in self.iter:
+ yield rv
+ else:
+ for rv in self.iter:
+ self.current_item = rv
+ yield rv
+ self.update(1)
+ self.finish()
+ self.render_progress()
+
+
+def pager(generator, color=None):
+ """Decide what method to use for paging through text."""
+ stdout = _default_text_stdout()
+ if not isatty(sys.stdin) or not isatty(stdout):
+ return _nullpager(stdout, generator, color)
+ pager_cmd = (os.environ.get("PAGER", None) or "").strip()
+ if pager_cmd:
+ if WIN:
+ return _tempfilepager(generator, pager_cmd, color)
+ return _pipepager(generator, pager_cmd, color)
+ if os.environ.get("TERM") in ("dumb", "emacs"):
+ return _nullpager(stdout, generator, color)
+ if WIN or sys.platform.startswith("os2"):
+ return _tempfilepager(generator, "more <", color)
+ if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0:
+ return _pipepager(generator, "less", color)
+
+ import tempfile
+
+ fd, filename = tempfile.mkstemp()
+ os.close(fd)
+ try:
+ if hasattr(os, "system") and os.system('more "{}"'.format(filename)) == 0:
+ return _pipepager(generator, "more", color)
+ return _nullpager(stdout, generator, color)
+ finally:
+ os.unlink(filename)
+
+
+def _pipepager(generator, cmd, color):
+ """Page through text by feeding it to another program. Invoking a
+ pager through this might support colors.
+ """
+ import subprocess
+
+ env = dict(os.environ)
+
+    # If we're piping to less we might support colors under the
+    # condition that the pager passes ANSI codes through (-r/-R flags).
+ cmd_detail = cmd.rsplit("/", 1)[-1].split()
+ if color is None and cmd_detail[0] == "less":
+ less_flags = "{}{}".format(os.environ.get("LESS", ""), " ".join(cmd_detail[1:]))
+ if not less_flags:
+ env["LESS"] = "-R"
+ color = True
+ elif "r" in less_flags or "R" in less_flags:
+ color = True
+
+ c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env)
+ encoding = get_best_encoding(c.stdin)
+ try:
+ for text in generator:
+ if not color:
+ text = strip_ansi(text)
+
+ c.stdin.write(text.encode(encoding, "replace"))
+ except (IOError, KeyboardInterrupt):
+ pass
+ else:
+ c.stdin.close()
+
+ # Less doesn't respect ^C, but catches it for its own UI purposes (aborting
+ # search or other commands inside less).
+ #
+ # That means when the user hits ^C, the parent process (click) terminates,
+ # but less is still alive, paging the output and messing up the terminal.
+ #
+ # If the user wants to make the pager exit on ^C, they should set
+ # `LESS='-K'`. It's not our decision to make.
+ while True:
+ try:
+ c.wait()
+ except KeyboardInterrupt:
+ pass
+ else:
+ break
+
+
+def _tempfilepager(generator, cmd, color):
+ """Page through text by invoking a program on a temporary file."""
+ import tempfile
+
+ filename = tempfile.mktemp()
+ # TODO: This never terminates if the passed generator never terminates.
+ text = "".join(generator)
+ if not color:
+ text = strip_ansi(text)
+ encoding = get_best_encoding(sys.stdout)
+ with open_stream(filename, "wb")[0] as f:
+ f.write(text.encode(encoding))
+ try:
+ os.system('{} "{}"'.format(cmd, filename))
+ finally:
+ os.unlink(filename)
+
+
+def _nullpager(stream, generator, color):
+ """Simply print unformatted text. This is the ultimate fallback."""
+ for text in generator:
+ if not color:
+ text = strip_ansi(text)
+ stream.write(text)
+
+
+class Editor(object):
+ def __init__(self, editor=None, env=None, require_save=True, extension=".txt"):
+ self.editor = editor
+ self.env = env
+ self.require_save = require_save
+ self.extension = extension
+
+ def get_editor(self):
+ if self.editor is not None:
+ return self.editor
+ for key in "VISUAL", "EDITOR":
+ rv = os.environ.get(key)
+ if rv:
+ return rv
+ if WIN:
+ return "notepad"
+ for editor in "sensible-editor", "vim", "nano":
+ if os.system("which {} >/dev/null 2>&1".format(editor)) == 0:
+ return editor
+ return "vi"
+
+ def edit_file(self, filename):
+ import subprocess
+
+ editor = self.get_editor()
+ if self.env:
+ environ = os.environ.copy()
+ environ.update(self.env)
+ else:
+ environ = None
+ try:
+ c = subprocess.Popen(
+ '{} "{}"'.format(editor, filename), env=environ, shell=True,
+ )
+ exit_code = c.wait()
+ if exit_code != 0:
+ raise ClickException("{}: Editing failed!".format(editor))
+ except OSError as e:
+ raise ClickException("{}: Editing failed: {}".format(editor, e))
+
+ def edit(self, text):
+ import tempfile
+
+ text = text or ""
+ if text and not text.endswith("\n"):
+ text += "\n"
+
+ fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension)
+ try:
+ if WIN:
+ encoding = "utf-8-sig"
+ text = text.replace("\n", "\r\n")
+ else:
+ encoding = "utf-8"
+ text = text.encode(encoding)
+
+ f = os.fdopen(fd, "wb")
+ f.write(text)
+ f.close()
+ timestamp = os.path.getmtime(name)
+
+ self.edit_file(name)
+
+ if self.require_save and os.path.getmtime(name) == timestamp:
+ return None
+
+ f = open(name, "rb")
+ try:
+ rv = f.read()
+ finally:
+ f.close()
+ return rv.decode("utf-8-sig").replace("\r\n", "\n")
+ finally:
+ os.unlink(name)
+
+
+def open_url(url, wait=False, locate=False):
+ import subprocess
+
+ def _unquote_file(url):
+ try:
+ import urllib
+ except ImportError:
+ import urllib
+ if url.startswith("file://"):
+ url = urllib.unquote(url[7:])
+ return url
+
+ if sys.platform == "darwin":
+ args = ["open"]
+ if wait:
+ args.append("-W")
+ if locate:
+ args.append("-R")
+ args.append(_unquote_file(url))
+ null = open("/dev/null", "w")
+ try:
+ return subprocess.Popen(args, stderr=null).wait()
+ finally:
+ null.close()
+ elif WIN:
+ if locate:
+ url = _unquote_file(url)
+ args = 'explorer /select,"{}"'.format(_unquote_file(url.replace('"', "")))
+ else:
+ args = 'start {} "" "{}"'.format(
+ "/WAIT" if wait else "", url.replace('"', "")
+ )
+ return os.system(args)
+ elif CYGWIN:
+ if locate:
+ url = _unquote_file(url)
+ args = 'cygstart "{}"'.format(os.path.dirname(url).replace('"', ""))
+ else:
+ args = 'cygstart {} "{}"'.format("-w" if wait else "", url.replace('"', ""))
+ return os.system(args)
+
+ try:
+ if locate:
+ url = os.path.dirname(_unquote_file(url)) or "."
+ else:
+ url = _unquote_file(url)
+ c = subprocess.Popen(["xdg-open", url])
+ if wait:
+ return c.wait()
+ return 0
+ except OSError:
+ if url.startswith(("http://", "https://")) and not locate and not wait:
+ import webbrowser
+
+ webbrowser.open(url)
+ return 0
+ return 1
+
+
+def _translate_ch_to_exc(ch):
+ if ch == u"\x03":
+ raise KeyboardInterrupt()
+ if ch == u"\x04" and not WIN: # Unix-like, Ctrl+D
+ raise EOFError()
+ if ch == u"\x1a" and WIN: # Windows, Ctrl+Z
+ raise EOFError()
+
+
+if WIN:
+ import msvcrt
+
+ @contextlib.contextmanager
+ def raw_terminal():
+ yield
+
+ def getchar(echo):
+ # The function `getch` will return a bytes object corresponding to
+ # the pressed character. Since Windows 10 build 1803, it will also
+ # return \x00 when called a second time after pressing a regular key.
+ #
+ # `getwch` does not share this probably-bugged behavior. Moreover, it
+ # returns a Unicode object by default, which is what we want.
+ #
+ # Either of these functions will return \x00 or \xe0 to indicate
+ # a special key, and you need to call the same function again to get
+ # the "rest" of the code. The fun part is that \u00e0 is
+ # "latin small letter a with grave", so if you type that on a French
+ # keyboard, you _also_ get a \xe0.
+ # E.g., consider the Up arrow. This returns \xe0 and then \x48. The
+ # resulting Unicode string reads as "a with grave" + "capital H".
+ # This is indistinguishable from when the user actually types
+ # "a with grave" and then "capital H".
+ #
+ # When \xe0 is returned, we assume it's part of a special-key sequence
+ # and call `getwch` again, but that means that when the user types
+ # the \u00e0 character, `getchar` doesn't return until a second
+ # character is typed.
+ # The alternative is returning immediately, but that would mess up
+ # cross-platform handling of arrow keys and others that start with
+ # \xe0. Another option is using `getch`, but then we can't reliably
+ # read non-ASCII characters, because return values of `getch` are
+ # limited to the current 8-bit codepage.
+ #
+ # Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
+ # is doing the right thing in more situations than with `getch`.
+ if echo:
+ func = msvcrt.getwche
+ else:
+ func = msvcrt.getwch
+
+ rv = func()
+ if rv in (u"\x00", u"\xe0"):
+ # \x00 and \xe0 are control characters that indicate special key,
+ # see above.
+ rv += func()
+ _translate_ch_to_exc(rv)
+ return rv
+
+
+else:
+ import tty
+ import termios
+
+ @contextlib.contextmanager
+ def raw_terminal():
+ if not isatty(sys.stdin):
+ f = open("/dev/tty")
+ fd = f.fileno()
+ else:
+ fd = sys.stdin.fileno()
+ f = None
+ try:
+ old_settings = termios.tcgetattr(fd)
+ try:
+ tty.setraw(fd)
+ yield fd
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+ sys.stdout.flush()
+ if f is not None:
+ f.close()
+ except termios.error:
+ pass
+
+ def getchar(echo):
+ with raw_terminal() as fd:
+ ch = os.read(fd, 32)
+ ch = ch.decode(get_best_encoding(sys.stdin), "replace")
+ if echo and isatty(sys.stdout):
+ sys.stdout.write(ch)
+ _translate_ch_to_exc(ch)
+ return ch
diff --git a/matteo_env/Lib/site-packages/click/_textwrap.py b/matteo_env/Lib/site-packages/click/_textwrap.py
new file mode 100644
index 0000000..6959087
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/_textwrap.py
@@ -0,0 +1,37 @@
+import textwrap
+from contextlib import contextmanager
+
+
+class TextWrapper(textwrap.TextWrapper):
+ def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
+ space_left = max(width - cur_len, 1)
+
+ if self.break_long_words:
+ last = reversed_chunks[-1]
+ cut = last[:space_left]
+ res = last[space_left:]
+ cur_line.append(cut)
+ reversed_chunks[-1] = res
+ elif not cur_line:
+ cur_line.append(reversed_chunks.pop())
+
+ @contextmanager
+ def extra_indent(self, indent):
+ old_initial_indent = self.initial_indent
+ old_subsequent_indent = self.subsequent_indent
+ self.initial_indent += indent
+ self.subsequent_indent += indent
+ try:
+ yield
+ finally:
+ self.initial_indent = old_initial_indent
+ self.subsequent_indent = old_subsequent_indent
+
+ def indent_only(self, text):
+ rv = []
+ for idx, line in enumerate(text.splitlines()):
+ indent = self.initial_indent
+ if idx > 0:
+ indent = self.subsequent_indent
+ rv.append(indent + line)
+ return "\n".join(rv)
diff --git a/matteo_env/Lib/site-packages/click/_unicodefun.py b/matteo_env/Lib/site-packages/click/_unicodefun.py
new file mode 100644
index 0000000..781c365
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/_unicodefun.py
@@ -0,0 +1,131 @@
+import codecs
+import os
+import sys
+
+from ._compat import PY2
+
+
+def _find_unicode_literals_frame():
+ import __future__
+
+ if not hasattr(sys, "_getframe"): # not all Python implementations have it
+ return 0
+ frm = sys._getframe(1)
+ idx = 1
+ while frm is not None:
+ if frm.f_globals.get("__name__", "").startswith("click."):
+ frm = frm.f_back
+ idx += 1
+ elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag:
+ return idx
+ else:
+ break
+ return 0
+
+
+def _check_for_unicode_literals():
+ if not __debug__:
+ return
+
+ from . import disable_unicode_literals_warning
+
+ if not PY2 or disable_unicode_literals_warning:
+ return
+ bad_frame = _find_unicode_literals_frame()
+ if bad_frame <= 0:
+ return
+ from warnings import warn
+
+ warn(
+ Warning(
+ "Click detected the use of the unicode_literals __future__"
+ " import. This is heavily discouraged because it can"
+ " introduce subtle bugs in your code. You should instead"
+ ' use explicit u"" literals for your unicode strings. For'
+ " more information see"
+ " https://click.palletsprojects.com/python3/"
+ ),
+ stacklevel=bad_frame,
+ )
+
+
+def _verify_python3_env():
+ """Ensures that the environment is good for unicode on Python 3."""
+ if PY2:
+ return
+ try:
+ import locale
+
+ fs_enc = codecs.lookup(locale.getpreferredencoding()).name
+ except Exception:
+ fs_enc = "ascii"
+ if fs_enc != "ascii":
+ return
+
+ extra = ""
+ if os.name == "posix":
+ import subprocess
+
+ try:
+ rv = subprocess.Popen(
+ ["locale", "-a"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ ).communicate()[0]
+ except OSError:
+ rv = b""
+ good_locales = set()
+ has_c_utf8 = False
+
+ # Make sure we're operating on text here.
+ if isinstance(rv, bytes):
+ rv = rv.decode("ascii", "replace")
+
+ for line in rv.splitlines():
+ locale = line.strip()
+ if locale.lower().endswith((".utf-8", ".utf8")):
+ good_locales.add(locale)
+ if locale.lower() in ("c.utf8", "c.utf-8"):
+ has_c_utf8 = True
+
+ extra += "\n\n"
+ if not good_locales:
+ extra += (
+ "Additional information: on this system no suitable"
+ " UTF-8 locales were discovered. This most likely"
+ " requires resolving by reconfiguring the locale"
+ " system."
+ )
+ elif has_c_utf8:
+ extra += (
+ "This system supports the C.UTF-8 locale which is"
+ " recommended. You might be able to resolve your issue"
+ " by exporting the following environment variables:\n\n"
+ " export LC_ALL=C.UTF-8\n"
+ " export LANG=C.UTF-8"
+ )
+ else:
+ extra += (
+ "This system lists a couple of UTF-8 supporting locales"
+ " that you can pick from. The following suitable"
+ " locales were discovered: {}".format(", ".join(sorted(good_locales)))
+ )
+
+ bad_locale = None
+ for locale in os.environ.get("LC_ALL"), os.environ.get("LANG"):
+ if locale and locale.lower().endswith((".utf-8", ".utf8")):
+ bad_locale = locale
+ if locale is not None:
+ break
+ if bad_locale is not None:
+ extra += (
+ "\n\nClick discovered that you exported a UTF-8 locale"
+ " but the locale system could not pick up from it"
+ " because it does not exist. The exported locale is"
+ " '{}' but it is not supported".format(bad_locale)
+ )
+
+ raise RuntimeError(
+ "Click will abort further execution because Python 3 was"
+ " configured to use ASCII as encoding for the environment."
+ " Consult https://click.palletsprojects.com/python3/ for"
+ " mitigation steps.{}".format(extra)
+ )
diff --git a/matteo_env/Lib/site-packages/click/_winconsole.py b/matteo_env/Lib/site-packages/click/_winconsole.py
new file mode 100644
index 0000000..b6c4274
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/_winconsole.py
@@ -0,0 +1,370 @@
+# -*- coding: utf-8 -*-
+# This module is based on the excellent work by Adam Bartoš who
+# provided a lot of what went into the implementation here in
+# the discussion to issue1602 in the Python bug tracker.
+#
+# There are some general differences in regards to how this works
+# compared to the original patches as we do not need to patch
+# the entire interpreter but just work in our little world of
+# echo and prompt.
+import ctypes
+import io
+import os
+import sys
+import time
+import zlib
+from ctypes import byref
+from ctypes import c_char
+from ctypes import c_char_p
+from ctypes import c_int
+from ctypes import c_ssize_t
+from ctypes import c_ulong
+from ctypes import c_void_p
+from ctypes import POINTER
+from ctypes import py_object
+from ctypes import windll
+from ctypes import WinError
+from ctypes import WINFUNCTYPE
+from ctypes.wintypes import DWORD
+from ctypes.wintypes import HANDLE
+from ctypes.wintypes import LPCWSTR
+from ctypes.wintypes import LPWSTR
+
+import msvcrt
+
+from ._compat import _NonClosingTextIOWrapper
+from ._compat import PY2
+from ._compat import text_type
+
+try:
+ from ctypes import pythonapi
+
+ PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
+ PyBuffer_Release = pythonapi.PyBuffer_Release
+except ImportError:
+ pythonapi = None
+
+
+c_ssize_p = POINTER(c_ssize_t)
+
+kernel32 = windll.kernel32
+GetStdHandle = kernel32.GetStdHandle
+ReadConsoleW = kernel32.ReadConsoleW
+WriteConsoleW = kernel32.WriteConsoleW
+GetConsoleMode = kernel32.GetConsoleMode
+GetLastError = kernel32.GetLastError
+GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
+CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
+ ("CommandLineToArgvW", windll.shell32)
+)
+LocalFree = WINFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p)(
+ ("LocalFree", windll.kernel32)
+)
+
+
+STDIN_HANDLE = GetStdHandle(-10)
+STDOUT_HANDLE = GetStdHandle(-11)
+STDERR_HANDLE = GetStdHandle(-12)
+
+
+PyBUF_SIMPLE = 0
+PyBUF_WRITABLE = 1
+
+ERROR_SUCCESS = 0
+ERROR_NOT_ENOUGH_MEMORY = 8
+ERROR_OPERATION_ABORTED = 995
+
+STDIN_FILENO = 0
+STDOUT_FILENO = 1
+STDERR_FILENO = 2
+
+EOF = b"\x1a"
+MAX_BYTES_WRITTEN = 32767
+
+
+class Py_buffer(ctypes.Structure):
+ _fields_ = [
+ ("buf", c_void_p),
+ ("obj", py_object),
+ ("len", c_ssize_t),
+ ("itemsize", c_ssize_t),
+ ("readonly", c_int),
+ ("ndim", c_int),
+ ("format", c_char_p),
+ ("shape", c_ssize_p),
+ ("strides", c_ssize_p),
+ ("suboffsets", c_ssize_p),
+ ("internal", c_void_p),
+ ]
+
+ if PY2:
+ _fields_.insert(-1, ("smalltable", c_ssize_t * 2))
+
+
+# On PyPy we cannot get buffers so our ability to operate here is
+# severely limited.
+if pythonapi is None:
+ get_buffer = None
+else:
+
+ def get_buffer(obj, writable=False):
+ buf = Py_buffer()
+ flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
+ PyObject_GetBuffer(py_object(obj), byref(buf), flags)
+ try:
+ buffer_type = c_char * buf.len
+ return buffer_type.from_address(buf.buf)
+ finally:
+ PyBuffer_Release(byref(buf))
+
+
+class _WindowsConsoleRawIOBase(io.RawIOBase):
+ def __init__(self, handle):
+ self.handle = handle
+
+ def isatty(self):
+ io.RawIOBase.isatty(self)
+ return True
+
+
+class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
+ def readable(self):
+ return True
+
+ def readinto(self, b):
+ bytes_to_be_read = len(b)
+ if not bytes_to_be_read:
+ return 0
+ elif bytes_to_be_read % 2:
+ raise ValueError(
+ "cannot read odd number of bytes from UTF-16-LE encoded console"
+ )
+
+ buffer = get_buffer(b, writable=True)
+ code_units_to_be_read = bytes_to_be_read // 2
+ code_units_read = c_ulong()
+
+ rv = ReadConsoleW(
+ HANDLE(self.handle),
+ buffer,
+ code_units_to_be_read,
+ byref(code_units_read),
+ None,
+ )
+ if GetLastError() == ERROR_OPERATION_ABORTED:
+ # wait for KeyboardInterrupt
+ time.sleep(0.1)
+ if not rv:
+ raise OSError("Windows error: {}".format(GetLastError()))
+
+ if buffer[0] == EOF:
+ return 0
+ return 2 * code_units_read.value
+
+
+class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
+ def writable(self):
+ return True
+
+ @staticmethod
+ def _get_error_message(errno):
+ if errno == ERROR_SUCCESS:
+ return "ERROR_SUCCESS"
+ elif errno == ERROR_NOT_ENOUGH_MEMORY:
+ return "ERROR_NOT_ENOUGH_MEMORY"
+ return "Windows error {}".format(errno)
+
+ def write(self, b):
+ bytes_to_be_written = len(b)
+ buf = get_buffer(b)
+ code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
+ code_units_written = c_ulong()
+
+ WriteConsoleW(
+ HANDLE(self.handle),
+ buf,
+ code_units_to_be_written,
+ byref(code_units_written),
+ None,
+ )
+ bytes_written = 2 * code_units_written.value
+
+ if bytes_written == 0 and bytes_to_be_written > 0:
+ raise OSError(self._get_error_message(GetLastError()))
+ return bytes_written
+
+
+class ConsoleStream(object):
+ def __init__(self, text_stream, byte_stream):
+ self._text_stream = text_stream
+ self.buffer = byte_stream
+
+ @property
+ def name(self):
+ return self.buffer.name
+
+ def write(self, x):
+ if isinstance(x, text_type):
+ return self._text_stream.write(x)
+ try:
+ self.flush()
+ except Exception:
+ pass
+ return self.buffer.write(x)
+
+ def writelines(self, lines):
+ for line in lines:
+ self.write(line)
+
+ def __getattr__(self, name):
+ return getattr(self._text_stream, name)
+
+ def isatty(self):
+ return self.buffer.isatty()
+
+ def __repr__(self):
+        return "<ConsoleStream name={} encoding={}>".format(
+            self.name, self.encoding
+        )
+
+
+class WindowsChunkedWriter(object):
+ """
+ Wraps a stream (such as stdout), acting as a transparent proxy for all
+ attribute access apart from method 'write()' which we wrap to write in
+ limited chunks due to a Windows limitation on binary console streams.
+ """
+
+ def __init__(self, wrapped):
+ # double-underscore everything to prevent clashes with names of
+ # attributes on the wrapped stream object.
+ self.__wrapped = wrapped
+
+ def __getattr__(self, name):
+ return getattr(self.__wrapped, name)
+
+ def write(self, text):
+ total_to_write = len(text)
+ written = 0
+
+ while written < total_to_write:
+ to_write = min(total_to_write - written, MAX_BYTES_WRITTEN)
+ self.__wrapped.write(text[written : written + to_write])
+ written += to_write
+
+
+_wrapped_std_streams = set()
+
+
+def _wrap_std_stream(name):
+ # Python 2 & Windows 7 and below
+ if (
+ PY2
+ and sys.getwindowsversion()[:2] <= (6, 1)
+ and name not in _wrapped_std_streams
+ ):
+ setattr(sys, name, WindowsChunkedWriter(getattr(sys, name)))
+ _wrapped_std_streams.add(name)
+
+
+def _get_text_stdin(buffer_stream):
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
+ "utf-16-le",
+ "strict",
+ line_buffering=True,
+ )
+ return ConsoleStream(text_stream, buffer_stream)
+
+
+def _get_text_stdout(buffer_stream):
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
+ "utf-16-le",
+ "strict",
+ line_buffering=True,
+ )
+ return ConsoleStream(text_stream, buffer_stream)
+
+
+def _get_text_stderr(buffer_stream):
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
+ "utf-16-le",
+ "strict",
+ line_buffering=True,
+ )
+ return ConsoleStream(text_stream, buffer_stream)
+
+
+if PY2:
+
+ def _hash_py_argv():
+ return zlib.crc32("\x00".join(sys.argv[1:]))
+
+ _initial_argv_hash = _hash_py_argv()
+
+ def _get_windows_argv():
+ argc = c_int(0)
+ argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
+ if not argv_unicode:
+ raise WinError()
+ try:
+ argv = [argv_unicode[i] for i in range(0, argc.value)]
+ finally:
+ LocalFree(argv_unicode)
+ del argv_unicode
+
+ if not hasattr(sys, "frozen"):
+ argv = argv[1:]
+ while len(argv) > 0:
+ arg = argv[0]
+ if not arg.startswith("-") or arg == "-":
+ break
+ argv = argv[1:]
+ if arg.startswith(("-c", "-m")):
+ break
+
+ return argv[1:]
+
+
+_stream_factories = {
+ 0: _get_text_stdin,
+ 1: _get_text_stdout,
+ 2: _get_text_stderr,
+}
+
+
+def _is_console(f):
+ if not hasattr(f, "fileno"):
+ return False
+
+ try:
+ fileno = f.fileno()
+ except OSError:
+ return False
+
+ handle = msvcrt.get_osfhandle(fileno)
+ return bool(GetConsoleMode(handle, byref(DWORD())))
+
+
+def _get_windows_console_stream(f, encoding, errors):
+ if (
+ get_buffer is not None
+ and encoding in ("utf-16-le", None)
+ and errors in ("strict", None)
+ and _is_console(f)
+ ):
+ func = _stream_factories.get(f.fileno())
+ if func is not None:
+ if not PY2:
+ f = getattr(f, "buffer", None)
+ if f is None:
+ return None
+ else:
+ # If we are on Python 2 we need to set the stream that we
+            # deal with to binary mode as otherwise the exercise is a
+ # bit moot. The same problems apply as for
+ # get_binary_stdin and friends from _compat.
+ msvcrt.setmode(f.fileno(), os.O_BINARY)
+ return func(f)
diff --git a/matteo_env/Lib/site-packages/click/core.py b/matteo_env/Lib/site-packages/click/core.py
new file mode 100644
index 0000000..f58bf26
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/core.py
@@ -0,0 +1,2030 @@
+import errno
+import inspect
+import os
+import sys
+from contextlib import contextmanager
+from functools import update_wrapper
+from itertools import repeat
+
+from ._compat import isidentifier
+from ._compat import iteritems
+from ._compat import PY2
+from ._compat import string_types
+from ._unicodefun import _check_for_unicode_literals
+from ._unicodefun import _verify_python3_env
+from .exceptions import Abort
+from .exceptions import BadParameter
+from .exceptions import ClickException
+from .exceptions import Exit
+from .exceptions import MissingParameter
+from .exceptions import UsageError
+from .formatting import HelpFormatter
+from .formatting import join_options
+from .globals import pop_context
+from .globals import push_context
+from .parser import OptionParser
+from .parser import split_opt
+from .termui import confirm
+from .termui import prompt
+from .termui import style
+from .types import BOOL
+from .types import convert_type
+from .types import IntRange
+from .utils import echo
+from .utils import get_os_args
+from .utils import make_default_short_help
+from .utils import make_str
+from .utils import PacifyFlushWrapper
+
+_missing = object()
+
+SUBCOMMAND_METAVAR = "COMMAND [ARGS]..."
+SUBCOMMANDS_METAVAR = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..."
+
+DEPRECATED_HELP_NOTICE = " (DEPRECATED)"
+DEPRECATED_INVOKE_NOTICE = "DeprecationWarning: The command %(name)s is deprecated."
+
+
+def _maybe_show_deprecated_notice(cmd):
+ if cmd.deprecated:
+ echo(style(DEPRECATED_INVOKE_NOTICE % {"name": cmd.name}, fg="red"), err=True)
+
+
+def fast_exit(code):
+ """Exit without garbage collection, this speeds up exit by about 10ms for
+ things like bash completion.
+ """
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os._exit(code)
+
+
+def _bashcomplete(cmd, prog_name, complete_var=None):
+ """Internal handler for the bash completion support."""
+ if complete_var is None:
+ complete_var = "_{}_COMPLETE".format(prog_name.replace("-", "_").upper())
+ complete_instr = os.environ.get(complete_var)
+ if not complete_instr:
+ return
+
+ from ._bashcomplete import bashcomplete
+
+ if bashcomplete(cmd, prog_name, complete_var, complete_instr):
+ fast_exit(1)
+
+
+def _check_multicommand(base_command, cmd_name, cmd, register=False):
+ if not base_command.chain or not isinstance(cmd, MultiCommand):
+ return
+ if register:
+ hint = (
+ "It is not possible to add multi commands as children to"
+ " another multi command that is in chain mode."
+ )
+ else:
+ hint = (
+ "Found a multi command as subcommand to a multi command"
+ " that is in chain mode. This is not supported."
+ )
+ raise RuntimeError(
+ "{}. Command '{}' is set to chain and '{}' was added as"
+ " subcommand but it in itself is a multi command. ('{}' is a {}"
+ " within a chained {} named '{}').".format(
+ hint,
+ base_command.name,
+ cmd_name,
+ cmd_name,
+ cmd.__class__.__name__,
+ base_command.__class__.__name__,
+ base_command.name,
+ )
+ )
+
+
+def batch(iterable, batch_size):
+ return list(zip(*repeat(iter(iterable), batch_size)))
+
+
+def invoke_param_callback(callback, ctx, param, value):
+ code = getattr(callback, "__code__", None)
+ args = getattr(code, "co_argcount", 3)
+
+ if args < 3:
+ from warnings import warn
+
+ warn(
+ "Parameter callbacks take 3 args, (ctx, param, value). The"
+ " 2-arg style is deprecated and will be removed in 8.0.".format(callback),
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ return callback(ctx, value)
+
+ return callback(ctx, param, value)
+
+
+@contextmanager
+def augment_usage_errors(ctx, param=None):
+ """Context manager that attaches extra information to exceptions."""
+ try:
+ yield
+ except BadParameter as e:
+ if e.ctx is None:
+ e.ctx = ctx
+ if param is not None and e.param is None:
+ e.param = param
+ raise
+ except UsageError as e:
+ if e.ctx is None:
+ e.ctx = ctx
+ raise
+
+
+def iter_params_for_processing(invocation_order, declaration_order):
+ """Given a sequence of parameters in the order as should be considered
+ for processing and an iterable of parameters that exist, this returns
+ a list in the correct order as they should be processed.
+ """
+
+ def sort_key(item):
+ try:
+ idx = invocation_order.index(item)
+ except ValueError:
+ idx = float("inf")
+ return (not item.is_eager, idx)
+
+ return sorted(declaration_order, key=sort_key)
+
+
+class Context(object):
+ """The context is a special internal object that holds state relevant
+ for the script execution at every single level. It's normally invisible
+ to commands unless they opt-in to getting access to it.
+
+ The context is useful as it can pass internal objects around and can
+ control special execution features such as reading data from
+ environment variables.
+
+ A context can be used as context manager in which case it will call
+ :meth:`close` on teardown.
+
+ .. versionadded:: 2.0
+ Added the `resilient_parsing`, `help_option_names`,
+ `token_normalize_func` parameters.
+
+ .. versionadded:: 3.0
+ Added the `allow_extra_args` and `allow_interspersed_args`
+ parameters.
+
+ .. versionadded:: 4.0
+ Added the `color`, `ignore_unknown_options`, and
+ `max_content_width` parameters.
+
+ .. versionadded:: 7.1
+ Added the `show_default` parameter.
+
+ :param command: the command class for this context.
+ :param parent: the parent context.
+ :param info_name: the info name for this invocation. Generally this
+ is the most descriptive name for the script or
+ command. For the toplevel script it is usually
+ the name of the script, for commands below it it's
+ the name of the script.
+ :param obj: an arbitrary object of user data.
+ :param auto_envvar_prefix: the prefix to use for automatic environment
+ variables. If this is `None` then reading
+ from environment variables is disabled. This
+ does not affect manually set environment
+ variables which are always read.
+ :param default_map: a dictionary (like object) with default values
+ for parameters.
+ :param terminal_width: the width of the terminal. The default is
+ inherit from parent context. If no context
+ defines the terminal width then auto
+ detection will be applied.
+ :param max_content_width: the maximum width for content rendered by
+ Click (this currently only affects help
+ pages). This defaults to 80 characters if
+ not overridden. In other words: even if the
+ terminal is larger than that, Click will not
+ format things wider than 80 characters by
+ default. In addition to that, formatters might
+ add some safety mapping on the right.
+ :param resilient_parsing: if this flag is enabled then Click will
+ parse without any interactivity or callback
+ invocation. Default values will also be
+ ignored. This is useful for implementing
+ things such as completion support.
+ :param allow_extra_args: if this is set to `True` then extra arguments
+ at the end will not raise an error and will be
+ kept on the context. The default is to inherit
+ from the command.
+ :param allow_interspersed_args: if this is set to `False` then options
+ and arguments cannot be mixed. The
+ default is to inherit from the command.
+ :param ignore_unknown_options: instructs click to ignore options it does
+ not know and keeps them for later
+ processing.
+ :param help_option_names: optionally a list of strings that define how
+ the default help parameter is named. The
+ default is ``['--help']``.
+ :param token_normalize_func: an optional function that is used to
+ normalize tokens (options, choices,
+ etc.). This for instance can be used to
+ implement case insensitive behavior.
+ :param color: controls if the terminal supports ANSI colors or not. The
+ default is autodetection. This is only needed if ANSI
+ codes are used in texts that Click prints which is by
+ default not the case. This for instance would affect
+ help output.
+ :param show_default: if True, shows defaults for all options.
+ Even if an option is later created with show_default=False,
+ this command-level setting overrides it.
+ """
+
+ def __init__(
+ self,
+ command,
+ parent=None,
+ info_name=None,
+ obj=None,
+ auto_envvar_prefix=None,
+ default_map=None,
+ terminal_width=None,
+ max_content_width=None,
+ resilient_parsing=False,
+ allow_extra_args=None,
+ allow_interspersed_args=None,
+ ignore_unknown_options=None,
+ help_option_names=None,
+ token_normalize_func=None,
+ color=None,
+ show_default=None,
+ ):
+ #: the parent context or `None` if none exists.
+ self.parent = parent
+ #: the :class:`Command` for this context.
+ self.command = command
+ #: the descriptive information name
+ self.info_name = info_name
+ #: the parsed parameters except if the value is hidden in which
+ #: case it's not remembered.
+ self.params = {}
+ #: the leftover arguments.
+ self.args = []
+ #: protected arguments. These are arguments that are prepended
+ #: to `args` when certain parsing scenarios are encountered but
+ #: must be never propagated to another arguments. This is used
+ #: to implement nested parsing.
+ self.protected_args = []
+ if obj is None and parent is not None:
+ obj = parent.obj
+ #: the user object stored.
+ self.obj = obj
+ self._meta = getattr(parent, "meta", {})
+
+ #: A dictionary (-like object) with defaults for parameters.
+ if (
+ default_map is None
+ and parent is not None
+ and parent.default_map is not None
+ ):
+ default_map = parent.default_map.get(info_name)
+ self.default_map = default_map
+
+ #: This flag indicates if a subcommand is going to be executed. A
+ #: group callback can use this information to figure out if it's
+ #: being executed directly or because the execution flow passes
+ #: onwards to a subcommand. By default it's None, but it can be
+ #: the name of the subcommand to execute.
+ #:
+ #: If chaining is enabled this will be set to ``'*'`` in case
+ #: any commands are executed. It is however not possible to
+ #: figure out which ones. If you require this knowledge you
+ #: should use a :func:`resultcallback`.
+ self.invoked_subcommand = None
+
+ if terminal_width is None and parent is not None:
+ terminal_width = parent.terminal_width
+ #: The width of the terminal (None is autodetection).
+ self.terminal_width = terminal_width
+
+ if max_content_width is None and parent is not None:
+ max_content_width = parent.max_content_width
+ #: The maximum width of formatted content (None implies a sensible
+ #: default which is 80 for most things).
+ self.max_content_width = max_content_width
+
+ if allow_extra_args is None:
+ allow_extra_args = command.allow_extra_args
+ #: Indicates if the context allows extra args or if it should
+ #: fail on parsing.
+ #:
+ #: .. versionadded:: 3.0
+ self.allow_extra_args = allow_extra_args
+
+ if allow_interspersed_args is None:
+ allow_interspersed_args = command.allow_interspersed_args
+ #: Indicates if the context allows mixing of arguments and
+ #: options or not.
+ #:
+ #: .. versionadded:: 3.0
+ self.allow_interspersed_args = allow_interspersed_args
+
+ if ignore_unknown_options is None:
+ ignore_unknown_options = command.ignore_unknown_options
+ #: Instructs click to ignore options that a command does not
+ #: understand and will store it on the context for later
+ #: processing. This is primarily useful for situations where you
+ #: want to call into external programs. Generally this pattern is
+        #: strongly discouraged because it's not possible to losslessly
+ #: forward all arguments.
+ #:
+ #: .. versionadded:: 4.0
+ self.ignore_unknown_options = ignore_unknown_options
+
+ if help_option_names is None:
+ if parent is not None:
+ help_option_names = parent.help_option_names
+ else:
+ help_option_names = ["--help"]
+
+ #: The names for the help options.
+ self.help_option_names = help_option_names
+
+ if token_normalize_func is None and parent is not None:
+ token_normalize_func = parent.token_normalize_func
+
+ #: An optional normalization function for tokens. This is
+ #: options, choices, commands etc.
+ self.token_normalize_func = token_normalize_func
+
+ #: Indicates if resilient parsing is enabled. In that case Click
+ #: will do its best to not cause any failures and default values
+ #: will be ignored. Useful for completion.
+ self.resilient_parsing = resilient_parsing
+
+ # If there is no envvar prefix yet, but the parent has one and
+ # the command on this level has a name, we can expand the envvar
+ # prefix automatically.
+ if auto_envvar_prefix is None:
+ if (
+ parent is not None
+ and parent.auto_envvar_prefix is not None
+ and self.info_name is not None
+ ):
+ auto_envvar_prefix = "{}_{}".format(
+ parent.auto_envvar_prefix, self.info_name.upper()
+ )
+ else:
+ auto_envvar_prefix = auto_envvar_prefix.upper()
+ if auto_envvar_prefix is not None:
+ auto_envvar_prefix = auto_envvar_prefix.replace("-", "_")
+ self.auto_envvar_prefix = auto_envvar_prefix
+
+ if color is None and parent is not None:
+ color = parent.color
+
+ #: Controls if styling output is wanted or not.
+ self.color = color
+
+ self.show_default = show_default
+
+ self._close_callbacks = []
+ self._depth = 0
+
+ def __enter__(self):
+ self._depth += 1
+ push_context(self)
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ self._depth -= 1
+ if self._depth == 0:
+ self.close()
+ pop_context()
+
+ @contextmanager
+ def scope(self, cleanup=True):
+ """This helper method can be used with the context object to promote
+ it to the current thread local (see :func:`get_current_context`).
+ The default behavior of this is to invoke the cleanup functions which
+ can be disabled by setting `cleanup` to `False`. The cleanup
+ functions are typically used for things such as closing file handles.
+
+ If the cleanup is intended the context object can also be directly
+ used as a context manager.
+
+ Example usage::
+
+ with ctx.scope():
+ assert get_current_context() is ctx
+
+ This is equivalent::
+
+ with ctx:
+ assert get_current_context() is ctx
+
+ .. versionadded:: 5.0
+
+ :param cleanup: controls if the cleanup functions should be run or
+ not. The default is to run these functions. In
+ some situations the context only wants to be
+ temporarily pushed in which case this can be disabled.
+ Nested pushes automatically defer the cleanup.
+ """
+ if not cleanup:
+ self._depth += 1
+ try:
+ with self as rv:
+ yield rv
+ finally:
+ if not cleanup:
+ self._depth -= 1
+
+ @property
+ def meta(self):
+ """This is a dictionary which is shared with all the contexts
+ that are nested. It exists so that click utilities can store some
+ state here if they need to. It is however the responsibility of
+ that code to manage this dictionary well.
+
+ The keys are supposed to be unique dotted strings. For instance
+ module paths are a good choice for it. What is stored in there is
+ irrelevant for the operation of click. However what is important is
+ that code that places data here adheres to the general semantics of
+ the system.
+
+ Example usage::
+
+ LANG_KEY = f'{__name__}.lang'
+
+ def set_language(value):
+ ctx = get_current_context()
+ ctx.meta[LANG_KEY] = value
+
+ def get_language():
+ return get_current_context().meta.get(LANG_KEY, 'en_US')
+
+ .. versionadded:: 5.0
+ """
+ return self._meta
+
+ def make_formatter(self):
+ """Creates the formatter for the help and usage output."""
+ return HelpFormatter(
+ width=self.terminal_width, max_width=self.max_content_width
+ )
+
+ def call_on_close(self, f):
+ """This decorator remembers a function as callback that should be
+ executed when the context tears down. This is most useful to bind
+ resource handling to the script execution. For instance, file objects
+ opened by the :class:`File` type will register their close callbacks
+ here.
+
+ :param f: the function to execute on teardown.
+ """
+ self._close_callbacks.append(f)
+ return f
+
+ def close(self):
+ """Invokes all close callbacks."""
+ for cb in self._close_callbacks:
+ cb()
+ self._close_callbacks = []
+
+ @property
+ def command_path(self):
+ """The computed command path. This is used for the ``usage``
+ information on the help page. It's automatically created by
+ combining the info names of the chain of contexts to the root.
+ """
+ rv = ""
+ if self.info_name is not None:
+ rv = self.info_name
+ if self.parent is not None:
+ rv = "{} {}".format(self.parent.command_path, rv)
+ return rv.lstrip()
+
+ def find_root(self):
+ """Finds the outermost context."""
+ node = self
+ while node.parent is not None:
+ node = node.parent
+ return node
+
+ def find_object(self, object_type):
+ """Finds the closest object of a given type."""
+ node = self
+ while node is not None:
+ if isinstance(node.obj, object_type):
+ return node.obj
+ node = node.parent
+
+ def ensure_object(self, object_type):
+ """Like :meth:`find_object` but sets the innermost object to a
+ new instance of `object_type` if it does not exist.
+ """
+ rv = self.find_object(object_type)
+ if rv is None:
+ self.obj = rv = object_type()
+ return rv
+
+ def lookup_default(self, name):
+ """Looks up the default for a parameter name. This by default
+ looks into the :attr:`default_map` if available.
+ """
+ if self.default_map is not None:
+ rv = self.default_map.get(name)
+ if callable(rv):
+ rv = rv()
+ return rv
+
+ def fail(self, message):
+ """Aborts the execution of the program with a specific error
+ message.
+
+ :param message: the error message to fail with.
+ """
+ raise UsageError(message, self)
+
+ def abort(self):
+ """Aborts the script."""
+ raise Abort()
+
+ def exit(self, code=0):
+ """Exits the application with a given exit code."""
+ raise Exit(code)
+
+ def get_usage(self):
+ """Helper method to get formatted usage string for the current
+ context and command.
+ """
+ return self.command.get_usage(self)
+
+ def get_help(self):
+ """Helper method to get formatted help page for the current
+ context and command.
+ """
+ return self.command.get_help(self)
+
+ def invoke(*args, **kwargs): # noqa: B902
+ """Invokes a command callback in exactly the way it expects. There
+ are two ways to invoke this method:
+
+ 1. the first argument can be a callback and all other arguments and
+ keyword arguments are forwarded directly to the function.
+ 2. the first argument is a click command object. In that case all
+ arguments are forwarded as well but proper click parameters
+ (options and click arguments) must be keyword arguments and Click
+ will fill in defaults.
+
+ Note that before Click 3.2 keyword arguments were not properly filled
+ in against the intention of this code and no context was created. For
+ more information about this change and why it was done in a bugfix
+ release see :ref:`upgrade-to-3.2`.
+ """
+ self, callback = args[:2]
+ ctx = self
+
+ # It's also possible to invoke another command which might or
+ # might not have a callback. In that case we also fill
+ # in defaults and make a new context for this command.
+ if isinstance(callback, Command):
+ other_cmd = callback
+ callback = other_cmd.callback
+ ctx = Context(other_cmd, info_name=other_cmd.name, parent=self)
+ if callback is None:
+ raise TypeError(
+ "The given command does not have a callback that can be invoked."
+ )
+
+ for param in other_cmd.params:
+ if param.name not in kwargs and param.expose_value:
+ kwargs[param.name] = param.get_default(ctx)
+
+ args = args[2:]
+ with augment_usage_errors(self):
+ with ctx:
+ return callback(*args, **kwargs)
+
+ def forward(*args, **kwargs): # noqa: B902
+ """Similar to :meth:`invoke` but fills in default keyword
+ arguments from the current context if the other command expects
+ it. This cannot invoke callbacks directly, only other commands.
+ """
+ self, cmd = args[:2]
+
+ # It's also possible to invoke another command which might or
+ # might not have a callback.
+ if not isinstance(cmd, Command):
+ raise TypeError("Callback is not a command.")
+
+ for param in self.params:
+ if param not in kwargs:
+ kwargs[param] = self.params[param]
+
+ return self.invoke(cmd, **kwargs)
+
+
+class BaseCommand(object):
+ """The base command implements the minimal API contract of commands.
+ Most code will never use this as it does not implement a lot of useful
+ functionality but it can act as the direct subclass of alternative
+ parsing methods that do not depend on the Click parser.
+
+ For instance, this can be used to bridge Click and other systems like
+ argparse or docopt.
+
+ Because base commands do not implement a lot of the API that other
+ parts of Click take for granted, they are not supported for all
+ operations. For instance, they cannot be used with the decorators
+ usually and they have no built-in callback system.
+
+ .. versionchanged:: 2.0
+ Added the `context_settings` parameter.
+
+ :param name: the name of the command to use unless a group overrides it.
+ :param context_settings: an optional dictionary with defaults that are
+ passed to the context object.
+ """
+
+ #: the default for the :attr:`Context.allow_extra_args` flag.
+ allow_extra_args = False
+ #: the default for the :attr:`Context.allow_interspersed_args` flag.
+ allow_interspersed_args = True
+ #: the default for the :attr:`Context.ignore_unknown_options` flag.
+ ignore_unknown_options = False
+
+ def __init__(self, name, context_settings=None):
+ #: the name the command thinks it has. Upon registering a command
+ #: on a :class:`Group` the group will default the command name
+ #: with this information. You should instead use the
+ #: :class:`Context`\'s :attr:`~Context.info_name` attribute.
+ self.name = name
+ if context_settings is None:
+ context_settings = {}
+ #: an optional dictionary with defaults passed to the context.
+ self.context_settings = context_settings
+
+ def __repr__(self):
+ return "<{} {}>".format(self.__class__.__name__, self.name)
+
+ def get_usage(self, ctx):
+ raise NotImplementedError("Base commands cannot get usage")
+
+ def get_help(self, ctx):
+ raise NotImplementedError("Base commands cannot get help")
+
+ def make_context(self, info_name, args, parent=None, **extra):
+ """This function when given an info name and arguments will kick
+ off the parsing and create a new :class:`Context`. It does not
+ invoke the actual command callback though.
+
+        :param info_name: the info name for this invocation. Generally this
+ is the most descriptive name for the script or
+ command. For the toplevel script it's usually
+ the name of the script, for commands below it it's
+ the name of the script.
+ :param args: the arguments to parse as list of strings.
+ :param parent: the parent context if available.
+ :param extra: extra keyword arguments forwarded to the context
+ constructor.
+ """
+ for key, value in iteritems(self.context_settings):
+ if key not in extra:
+ extra[key] = value
+ ctx = Context(self, info_name=info_name, parent=parent, **extra)
+ with ctx.scope(cleanup=False):
+ self.parse_args(ctx, args)
+ return ctx
+
    def parse_args(self, ctx, args):
        """Given a context and a list of arguments this creates the parser
        and parses the arguments, then modifies the context as necessary.
        This is automatically invoked by :meth:`make_context`.
        """
        # Abstract: see Command.parse_args for the concrete implementation.
        raise NotImplementedError("Base commands do not know how to parse arguments.")
+
    def invoke(self, ctx):
        """Given a context, this invokes the command.  The default
        implementation is raising a not implemented error.
        """
        # Abstract: see Command.invoke for the concrete implementation.
        raise NotImplementedError("Base commands are not invokable by default")
+
    def main(
        self,
        args=None,
        prog_name=None,
        complete_var=None,
        standalone_mode=True,
        **extra
    ):
        """This is the way to invoke a script with all the bells and
        whistles as a command line application.  This will always terminate
        the application after a call.  If this is not wanted, ``SystemExit``
        needs to be caught.

        This method is also available by directly calling the instance of
        a :class:`Command`.

        .. versionadded:: 3.0
           Added the `standalone_mode` flag to control the standalone mode.

        :param args: the arguments that should be used for parsing.  If not
                     provided, ``sys.argv[1:]`` is used.
        :param prog_name: the program name that should be used.  By default
                          the program name is constructed by taking the file
                          name from ``sys.argv[0]``.
        :param complete_var: the environment variable that controls the
                             bash completion support.  The default is
                             ``"_<prog_name>_COMPLETE"`` with prog_name in
                             uppercase.
        :param standalone_mode: the default behavior is to invoke the script
                                in standalone mode.  Click will then
                                handle exceptions and convert them into
                                error messages and the function will never
                                return but shut down the interpreter.  If
                                this is set to `False` they will be
                                propagated to the caller and the return
                                value of this function is the return value
                                of :meth:`invoke`.
        :param extra: extra keyword arguments are forwarded to the context
                      constructor.  See :class:`Context` for more information.
        """
        # If we are in Python 3, we will verify that the environment is
        # sane at this point or reject further execution to avoid a
        # broken script.
        if not PY2:
            _verify_python3_env()
        else:
            _check_for_unicode_literals()

        if args is None:
            args = get_os_args()
        else:
            args = list(args)

        if prog_name is None:
            prog_name = make_str(
                os.path.basename(sys.argv[0] if sys.argv else __file__)
            )

        # Hook for the Bash completion.  This only activates if the Bash
        # completion is actually enabled, otherwise this is quite a fast
        # noop.
        _bashcomplete(self, prog_name, complete_var)

        try:
            try:
                with self.make_context(prog_name, args, **extra) as ctx:
                    rv = self.invoke(ctx)
                    if not standalone_mode:
                        return rv
                    # it's not safe to `ctx.exit(rv)` here!
                    # note that `rv` may actually contain data like "1" which
                    # has obvious effects
                    # more subtle case: `rv=[None, None]` can come out of
                    # chained commands which all returned `None` -- so it's not
                    # even always obvious that `rv` indicates success/failure
                    # by its truthiness/falsiness
                    ctx.exit()
            except (EOFError, KeyboardInterrupt):
                echo(file=sys.stderr)
                raise Abort()
            except ClickException as e:
                # In standalone mode Click exceptions are rendered for the
                # user and mapped to an exit code instead of propagating.
                if not standalone_mode:
                    raise
                e.show()
                sys.exit(e.exit_code)
            except IOError as e:
                if e.errno == errno.EPIPE:
                    # Downstream closed the pipe (e.g. `cmd | head`); exit
                    # quietly instead of tracebacking on flush at shutdown.
                    sys.stdout = PacifyFlushWrapper(sys.stdout)
                    sys.stderr = PacifyFlushWrapper(sys.stderr)
                    sys.exit(1)
                else:
                    raise
        except Exit as e:
            if standalone_mode:
                sys.exit(e.exit_code)
            else:
                # in non-standalone mode, return the exit code
                # note that this is only reached if `self.invoke` above raises
                # an Exit explicitly -- thus bypassing the check there which
                # would return its result
                # the results of non-standalone execution may therefore be
                # somewhat ambiguous: if there are codepaths which lead to
                # `ctx.exit(1)` and to `return 1`, the caller won't be able to
                # tell the difference between the two
                return e.exit_code
        except Abort:
            if not standalone_mode:
                raise
            echo("Aborted!", file=sys.stderr)
            sys.exit(1)
+
    def __call__(self, *args, **kwargs):
        """Alias for :meth:`main`."""
        # Makes a Command instance directly callable as a CLI entry point.
        return self.main(*args, **kwargs)
+
+
class Command(BaseCommand):
    """Commands are the basic building block of command line interfaces in
    Click.  A basic command handles command line parsing and might dispatch
    more parsing to commands nested below it.

    .. versionchanged:: 2.0
       Added the `context_settings` parameter.
    .. versionchanged:: 7.1
       Added the `no_args_is_help` parameter.

    :param name: the name of the command to use unless a group overrides it.
    :param context_settings: an optional dictionary with defaults that are
                             passed to the context object.
    :param callback: the callback to invoke.  This is optional.
    :param params: the parameters to register with this command.  This can
                   be either :class:`Option` or :class:`Argument` objects.
    :param help: the help string to use for this command.
    :param epilog: like the help string but it's printed at the end of the
                   help page after everything else.
    :param short_help: the short help to use for this command.  This is
                       shown on the command listing of the parent command.
    :param add_help_option: by default each command registers a ``--help``
                            option.  This can be disabled by this parameter.
    :param no_args_is_help: this controls what happens if no arguments are
                            provided.  This option is disabled by default.
                            If enabled this will add ``--help`` as argument
                            if no arguments are passed
    :param hidden: hide this command from help outputs.

    :param deprecated: issues a message indicating that
                       the command is deprecated.
    """

    def __init__(
        self,
        name,
        context_settings=None,
        callback=None,
        params=None,
        help=None,
        epilog=None,
        short_help=None,
        options_metavar="[OPTIONS]",
        add_help_option=True,
        no_args_is_help=False,
        hidden=False,
        deprecated=False,
    ):
        BaseCommand.__init__(self, name, context_settings)
        #: the callback to execute when the command fires.  This might be
        #: `None` in which case nothing happens.
        self.callback = callback
        #: the list of parameters for this command in the order they
        #: should show up in the help page and execute.  Eager parameters
        #: will automatically be handled before non eager ones.
        self.params = params or []
        # if a form feed (page break) is found in the help text, truncate help
        # text to the content preceding the first form feed
        if help and "\f" in help:
            help = help.split("\f", 1)[0]
        self.help = help
        self.epilog = epilog
        self.options_metavar = options_metavar
        self.short_help = short_help
        self.add_help_option = add_help_option
        self.no_args_is_help = no_args_is_help
        self.hidden = hidden
        self.deprecated = deprecated

    def get_usage(self, ctx):
        """Formats the usage line into a string and returns it.

        Calls :meth:`format_usage` internally.
        """
        formatter = ctx.make_formatter()
        self.format_usage(ctx, formatter)
        return formatter.getvalue().rstrip("\n")

    def get_params(self, ctx):
        """Return the declared parameters plus the implicit help option
        (when enabled and not shadowed by a user-declared parameter).
        """
        rv = self.params
        help_option = self.get_help_option(ctx)
        if help_option is not None:
            rv = rv + [help_option]
        return rv

    def format_usage(self, ctx, formatter):
        """Writes the usage line into the formatter.

        This is a low-level method called by :meth:`get_usage`.
        """
        pieces = self.collect_usage_pieces(ctx)
        formatter.write_usage(ctx.command_path, " ".join(pieces))

    def collect_usage_pieces(self, ctx):
        """Returns all the pieces that go into the usage line and returns
        it as a list of strings.
        """
        rv = [self.options_metavar]
        for param in self.get_params(ctx):
            rv.extend(param.get_usage_pieces(ctx))
        return rv

    def get_help_option_names(self, ctx):
        """Returns the names for the help option."""
        all_names = set(ctx.help_option_names)
        # Drop any name the user already claimed for their own parameters.
        for param in self.params:
            all_names.difference_update(param.opts)
            all_names.difference_update(param.secondary_opts)
        return all_names

    def get_help_option(self, ctx):
        """Returns the help option object."""
        help_options = self.get_help_option_names(ctx)
        if not help_options or not self.add_help_option:
            return

        def show_help(ctx, param, value):
            # Eager flag callback: print help and stop processing right away.
            if value and not ctx.resilient_parsing:
                echo(ctx.get_help(), color=ctx.color)
                ctx.exit()

        return Option(
            help_options,
            is_flag=True,
            is_eager=True,
            expose_value=False,
            callback=show_help,
            help="Show this message and exit.",
        )

    def make_parser(self, ctx):
        """Creates the underlying option parser for this command."""
        parser = OptionParser(ctx)
        for param in self.get_params(ctx):
            param.add_to_parser(parser, ctx)
        return parser

    def get_help(self, ctx):
        """Formats the help into a string and returns it.

        Calls :meth:`format_help` internally.
        """
        formatter = ctx.make_formatter()
        self.format_help(ctx, formatter)
        return formatter.getvalue().rstrip("\n")

    def get_short_help_str(self, limit=45):
        """Gets short help for the command or makes it by shortening the
        long help string.
        """
        # short_help wins; otherwise shorten the long help; else empty string.
        return (
            self.short_help
            or self.help
            and make_default_short_help(self.help, limit)
            or ""
        )

    def format_help(self, ctx, formatter):
        """Writes the help into the formatter if it exists.

        This is a low-level method called by :meth:`get_help`.

        This calls the following methods:

        - :meth:`format_usage`
        - :meth:`format_help_text`
        - :meth:`format_options`
        - :meth:`format_epilog`
        """
        self.format_usage(ctx, formatter)
        self.format_help_text(ctx, formatter)
        self.format_options(ctx, formatter)
        self.format_epilog(ctx, formatter)

    def format_help_text(self, ctx, formatter):
        """Writes the help text to the formatter if it exists."""
        if self.help:
            formatter.write_paragraph()
            with formatter.indentation():
                help_text = self.help
                if self.deprecated:
                    help_text += DEPRECATED_HELP_NOTICE
                formatter.write_text(help_text)
        elif self.deprecated:
            # No help text at all, but the deprecation notice must still show.
            formatter.write_paragraph()
            with formatter.indentation():
                formatter.write_text(DEPRECATED_HELP_NOTICE)

    def format_options(self, ctx, formatter):
        """Writes all the options into the formatter if they exist."""
        opts = []
        for param in self.get_params(ctx):
            rv = param.get_help_record(ctx)
            if rv is not None:
                opts.append(rv)

        if opts:
            with formatter.section("Options"):
                formatter.write_dl(opts)

    def format_epilog(self, ctx, formatter):
        """Writes the epilog into the formatter if it exists."""
        if self.epilog:
            formatter.write_paragraph()
            with formatter.indentation():
                formatter.write_text(self.epilog)

    def parse_args(self, ctx, args):
        """Parse *args* against this command's parameters, store the results
        on *ctx*, and return any leftover arguments.
        """
        if not args and self.no_args_is_help and not ctx.resilient_parsing:
            echo(ctx.get_help(), color=ctx.color)
            ctx.exit()

        parser = self.make_parser(ctx)
        opts, args, param_order = parser.parse_args(args=args)

        for param in iter_params_for_processing(param_order, self.get_params(ctx)):
            value, args = param.handle_parse_result(ctx, opts, args)

        if args and not ctx.allow_extra_args and not ctx.resilient_parsing:
            ctx.fail(
                "Got unexpected extra argument{} ({})".format(
                    "s" if len(args) != 1 else "", " ".join(map(make_str, args))
                )
            )

        ctx.args = args
        return args

    def invoke(self, ctx):
        """Given a context, this invokes the attached callback (if it exists)
        in the right way.
        """
        _maybe_show_deprecated_notice(self)
        if self.callback is not None:
            return ctx.invoke(self.callback, **ctx.params)
+
+
class MultiCommand(Command):
    """A multi command is the basic implementation of a command that
    dispatches to subcommands.  The most common version is the
    :class:`Group`.

    :param invoke_without_command: this controls how the multi command itself
                                   is invoked.  By default it's only invoked
                                   if a subcommand is provided.
    :param no_args_is_help: this controls what happens if no arguments are
                            provided.  This option is enabled by default if
                            `invoke_without_command` is disabled or disabled
                            if it's enabled.  If enabled this will add
                            ``--help`` as argument if no arguments are
                            passed.
    :param subcommand_metavar: the string that is used in the documentation
                               to indicate the subcommand place.
    :param chain: if this is set to `True` chaining of multiple subcommands
                  is enabled.  This restricts the form of commands in that
                  they cannot have optional arguments but it allows
                  multiple commands to be chained together.
    :param result_callback: the result callback to attach to this multi
                            command.
    """

    # Extra arguments are tolerated here by design: they are the subcommand
    # name and its arguments, split off again in parse_args().
    allow_extra_args = True
    allow_interspersed_args = False

    def __init__(
        self,
        name=None,
        invoke_without_command=False,
        no_args_is_help=None,
        subcommand_metavar=None,
        chain=False,
        result_callback=None,
        **attrs
    ):
        Command.__init__(self, name, **attrs)
        # Showing help on "no arguments" defaults to the opposite of
        # invoke_without_command: if the group does nothing by itself,
        # an empty invocation should print help instead.
        if no_args_is_help is None:
            no_args_is_help = not invoke_without_command
        self.no_args_is_help = no_args_is_help
        self.invoke_without_command = invoke_without_command
        if subcommand_metavar is None:
            if chain:
                subcommand_metavar = SUBCOMMANDS_METAVAR
            else:
                subcommand_metavar = SUBCOMMAND_METAVAR
        self.subcommand_metavar = subcommand_metavar
        self.chain = chain
        #: The result callback that is stored.  This can be set or
        #: overridden with the :func:`resultcallback` decorator.
        self.result_callback = result_callback

        if self.chain:
            # In chain mode optional arguments are ambiguous (they could
            # belong to the group or a subcommand), so they are rejected.
            for param in self.params:
                if isinstance(param, Argument) and not param.required:
                    raise RuntimeError(
                        "Multi commands in chain mode cannot have"
                        " optional arguments."
                    )

    def collect_usage_pieces(self, ctx):
        """Append the subcommand placeholder to the inherited usage pieces."""
        rv = Command.collect_usage_pieces(self, ctx)
        rv.append(self.subcommand_metavar)
        return rv

    def format_options(self, ctx, formatter):
        """Render options like a plain command, then the command listing."""
        Command.format_options(self, ctx, formatter)
        self.format_commands(ctx, formatter)

    def resultcallback(self, replace=False):
        """Adds a result callback to the chain command.  By default if a
        result callback is already registered this will chain them but
        this can be disabled with the `replace` parameter.  The result
        callback is invoked with the return value of the subcommand
        (or the list of return values from all subcommands if chaining
        is enabled) as well as the parameters as they would be passed
        to the main callback.

        Example::

            @click.group()
            @click.option('-i', '--input', default=23)
            def cli(input):
                return 42

            @cli.resultcallback()
            def process_result(result, input):
                return result + input

        .. versionadded:: 3.0

        :param replace: if set to `True` an already existing result
                        callback will be removed.
        """

        def decorator(f):
            old_callback = self.result_callback
            if old_callback is None or replace:
                self.result_callback = f
                return f

            # Chain the callbacks: the new one receives the old one's result.
            def function(__value, *args, **kwargs):
                return f(old_callback(__value, *args, **kwargs), *args, **kwargs)

            self.result_callback = rv = update_wrapper(function, f)
            return rv

        return decorator

    def format_commands(self, ctx, formatter):
        """Extra format methods for multi methods that adds all the commands
        after the options.
        """
        commands = []
        for subcommand in self.list_commands(ctx):
            cmd = self.get_command(ctx, subcommand)
            # What is this, the tool lied about a command.  Ignore it
            if cmd is None:
                continue
            if cmd.hidden:
                continue

            commands.append((subcommand, cmd))

        # allow for 3 times the default spacing
        if len(commands):
            limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands)

            rows = []
            for subcommand, cmd in commands:
                help = cmd.get_short_help_str(limit)
                rows.append((subcommand, help))

            if rows:
                with formatter.section("Commands"):
                    formatter.write_dl(rows)

    def parse_args(self, ctx, args):
        """Parse the group's own options, then stash the remaining tokens
        (the subcommand invocation) on the context for :meth:`invoke`.
        """
        if not args and self.no_args_is_help and not ctx.resilient_parsing:
            echo(ctx.get_help(), color=ctx.color)
            ctx.exit()

        rest = Command.parse_args(self, ctx, args)
        if self.chain:
            ctx.protected_args = rest
            ctx.args = []
        elif rest:
            # First token is the subcommand name; protect it from
            # being consumed by further option parsing.
            ctx.protected_args, ctx.args = rest[:1], rest[1:]

        return ctx.args

    def invoke(self, ctx):
        """Invoke the group callback and dispatch to the resolved
        subcommand(s), honoring ``chain`` and ``invoke_without_command``.
        """
        def _process_result(value):
            if self.result_callback is not None:
                value = ctx.invoke(self.result_callback, value, **ctx.params)
            return value

        if not ctx.protected_args:
            # If we are invoked without command the chain flag controls
            # how this happens.  If we are not in chain mode, the return
            # value here is the return value of the command.
            # If however we are in chain mode, the return value is the
            # return value of the result processor invoked with an empty
            # list (which means that no subcommand actually was executed).
            if self.invoke_without_command:
                if not self.chain:
                    return Command.invoke(self, ctx)
                with ctx:
                    Command.invoke(self, ctx)
                    return _process_result([])
            ctx.fail("Missing command.")

        # Fetch args back out
        args = ctx.protected_args + ctx.args
        ctx.args = []
        ctx.protected_args = []

        # If we're not in chain mode, we only allow the invocation of a
        # single command but we also inform the current context about the
        # name of the command to invoke.
        if not self.chain:
            # Make sure the context is entered so we do not clean up
            # resources until the result processor has worked.
            with ctx:
                cmd_name, cmd, args = self.resolve_command(ctx, args)
                ctx.invoked_subcommand = cmd_name
                Command.invoke(self, ctx)
                sub_ctx = cmd.make_context(cmd_name, args, parent=ctx)
                with sub_ctx:
                    return _process_result(sub_ctx.command.invoke(sub_ctx))

        # In chain mode we create the contexts step by step, but after the
        # base command has been invoked.  Because at that point we do not
        # know the subcommands yet, the invoked subcommand attribute is
        # set to ``*`` to inform the command that subcommands are executed
        # but nothing else.
        with ctx:
            ctx.invoked_subcommand = "*" if args else None
            Command.invoke(self, ctx)

            # Otherwise we make every single context and invoke them in a
            # chain.  In that case the return value to the result processor
            # is the list of all invoked subcommand's results.
            contexts = []
            while args:
                cmd_name, cmd, args = self.resolve_command(ctx, args)
                sub_ctx = cmd.make_context(
                    cmd_name,
                    args,
                    parent=ctx,
                    allow_extra_args=True,
                    allow_interspersed_args=False,
                )
                contexts.append(sub_ctx)
                args, sub_ctx.args = sub_ctx.args, []

            rv = []
            for sub_ctx in contexts:
                with sub_ctx:
                    rv.append(sub_ctx.command.invoke(sub_ctx))
            return _process_result(rv)

    def resolve_command(self, ctx, args):
        """Map the first argument to a subcommand; returns
        ``(cmd_name, cmd, remaining_args)``.
        """
        cmd_name = make_str(args[0])
        original_cmd_name = cmd_name

        # Get the command
        cmd = self.get_command(ctx, cmd_name)

        # If we can't find the command but there is a normalization
        # function available, we try with that one.
        if cmd is None and ctx.token_normalize_func is not None:
            cmd_name = ctx.token_normalize_func(cmd_name)
            cmd = self.get_command(ctx, cmd_name)

        # If we don't find the command we want to show an error message
        # to the user that it was not provided.  However, there is
        # something else we should do: if the first argument looks like
        # an option we want to kick off parsing again for arguments to
        # resolve things like --help which now should go to the main
        # place.
        if cmd is None and not ctx.resilient_parsing:
            if split_opt(cmd_name)[0]:
                self.parse_args(ctx, ctx.args)
            ctx.fail("No such command '{}'.".format(original_cmd_name))

        return cmd_name, cmd, args[1:]

    def get_command(self, ctx, cmd_name):
        """Given a context and a command name, this returns a
        :class:`Command` object if it exists or returns `None`.
        """
        raise NotImplementedError()

    def list_commands(self, ctx):
        """Returns a list of subcommand names in the order they should
        appear.
        """
        return []
+
+
class Group(MultiCommand):
    """A group allows a command to have subcommands attached.  This is
    the most common way to implement nesting in Click.

    :param commands: a dictionary of commands.
    """

    def __init__(self, name=None, commands=None, **attrs):
        MultiCommand.__init__(self, name, **attrs)
        #: the registered subcommands by their exported names.
        self.commands = commands if commands else {}

    def add_command(self, cmd, name=None):
        """Register *cmd* with this group under *name*, falling back to
        the command's own name when none is given.
        """
        registered_name = name or cmd.name
        if registered_name is None:
            raise TypeError("Command has no name.")
        _check_multicommand(self, registered_name, cmd, register=True)
        self.commands[registered_name] = cmd

    def command(self, *args, **kwargs):
        """Decorator shortcut: build a command via :func:`command` and
        immediately attach it with :meth:`add_command`.
        """
        from .decorators import command

        def decorator(f):
            new_command = command(*args, **kwargs)(f)
            self.add_command(new_command)
            return new_command

        return decorator

    def group(self, *args, **kwargs):
        """Decorator shortcut: build a nested group via :func:`group` and
        immediately attach it with :meth:`add_command`.
        """
        from .decorators import group

        def decorator(f):
            new_group = group(*args, **kwargs)(f)
            self.add_command(new_group)
            return new_group

        return decorator

    def get_command(self, ctx, cmd_name):
        """Look up a registered subcommand; `None` when unknown."""
        return self.commands.get(cmd_name)

    def list_commands(self, ctx):
        """All registered subcommand names, alphabetically."""
        return sorted(self.commands.keys())
+
+
class CommandCollection(MultiCommand):
    """A command collection is a multi command that merges multiple multi
    commands together into one.  This is a straightforward implementation
    that accepts a list of different multi commands as sources and
    provides all the commands for each of them.
    """

    def __init__(self, name=None, sources=None, **attrs):
        MultiCommand.__init__(self, name, **attrs)
        #: The list of registered multi commands.
        self.sources = sources if sources else []

    def add_source(self, multi_cmd):
        """Adds a new multi command to the chain dispatcher."""
        self.sources.append(multi_cmd)

    def get_command(self, ctx, cmd_name):
        """Return the first source's match for *cmd_name*, or `None`."""
        for source in self.sources:
            found = source.get_command(ctx, cmd_name)
            if found is None:
                continue
            if self.chain:
                _check_multicommand(self, cmd_name, found)
            return found

    def list_commands(self, ctx):
        """Return the sorted union of subcommand names across all sources."""
        names = set()
        for source in self.sources:
            names.update(source.list_commands(ctx))
        return sorted(names)
+
+
class Parameter(object):
    r"""A parameter to a command comes in two versions: they are either
    :class:`Option`\s or :class:`Argument`\s.  Other subclasses are currently
    not supported by design as some of the internals for parsing are
    intentionally not finalized.

    Some settings are supported by both options and arguments.

    :param param_decls: the parameter declarations for this option or
                        argument.  This is a list of flags or argument
                        names.
    :param type: the type that should be used.  Either a :class:`ParamType`
                 or a Python type.  The later is converted into the former
                 automatically if supported.
    :param required: controls if this is optional or not.
    :param default: the default value if omitted.  This can also be a callable,
                    in which case it's invoked when the default is needed
                    without any arguments.
    :param callback: a callback that should be executed after the parameter
                     was matched.  This is called as ``fn(ctx, param,
                     value)`` and needs to return the value.
    :param nargs: the number of arguments to match.  If not ``1`` the return
                  value is a tuple instead of single value.  The default for
                  nargs is ``1`` (except if the type is a tuple, then it's
                  the arity of the tuple).
    :param metavar: how the value is represented in the help page.
    :param expose_value: if this is `True` then the value is passed onwards
                         to the command callback and stored on the context,
                         otherwise it's skipped.
    :param is_eager: eager values are processed before non eager ones.  This
                     should not be set for arguments or it will inverse the
                     order of processing.
    :param envvar: a string or list of strings that are environment variables
                   that should be checked.

    .. versionchanged:: 7.1
        Empty environment variables are ignored rather than taking the
        empty string value. This makes it possible for scripts to clear
        variables if they can't unset them.

    .. versionchanged:: 2.0
        Changed signature for parameter callback to also be passed the
        parameter.  The old callback format will still work, but it will
        raise a warning to give you a chance to migrate the code easier.
    """
    param_type_name = "parameter"

    def __init__(
        self,
        param_decls=None,
        type=None,
        required=False,
        default=None,
        callback=None,
        nargs=None,
        metavar=None,
        expose_value=True,
        is_eager=False,
        envvar=None,
        autocompletion=None,
    ):
        self.name, self.opts, self.secondary_opts = self._parse_decls(
            param_decls or (), expose_value
        )

        self.type = convert_type(type, default)

        # Default nargs to what the type tells us if we have that
        # information available.
        if nargs is None:
            if self.type.is_composite:
                nargs = self.type.arity
            else:
                nargs = 1

        self.required = required
        self.callback = callback
        self.nargs = nargs
        self.multiple = False
        self.expose_value = expose_value
        self.default = default
        self.is_eager = is_eager
        self.metavar = metavar
        self.envvar = envvar
        self.autocompletion = autocompletion

    def __repr__(self):
        return "<{} {}>".format(self.__class__.__name__, self.name)

    @property
    def human_readable_name(self):
        """Returns the human readable name of this parameter.  This is the
        same as the name for options, but the metavar for arguments.
        """
        return self.name

    def make_metavar(self):
        """Return the metavar shown for this parameter in help output."""
        if self.metavar is not None:
            return self.metavar
        metavar = self.type.get_metavar(self)
        if metavar is None:
            metavar = self.type.name.upper()
        if self.nargs != 1:
            metavar += "..."
        return metavar

    def get_default(self, ctx):
        """Given a context variable this calculates the default value."""
        # The default may be a callable that lazily produces the value.
        if callable(self.default):
            rv = self.default()
        else:
            rv = self.default
        return self.type_cast_value(ctx, rv)

    def add_to_parser(self, parser, ctx):
        """Register this parameter with the option parser.  The base class
        registers nothing; subclasses override this.
        """
        pass

    def consume_value(self, ctx, opts):
        """Look up this parameter's raw value: parsed opts first, then the
        environment, then the context's default map.
        """
        value = opts.get(self.name)
        if value is None:
            value = self.value_from_envvar(ctx)
        if value is None:
            value = ctx.lookup_default(self.name)
        return value

    def type_cast_value(self, ctx, value):
        """Given a value this runs it properly through the type system.
        This automatically handles things like `nargs` and `multiple` as
        well as composite types.
        """
        if self.type.is_composite:
            if self.nargs <= 1:
                raise TypeError(
                    "Attempted to invoke composite type but nargs has"
                    " been set to {}. This is not supported; nargs"
                    " needs to be set to a fixed value > 1.".format(self.nargs)
                )
            if self.multiple:
                return tuple(self.type(x or (), self, ctx) for x in value or ())
            return self.type(value or (), self, ctx)

        def _convert(value, level):
            if level == 0:
                return self.type(value, self, ctx)
            return tuple(_convert(x, level - 1) for x in value or ())

        # Nesting depth: one tuple level for nargs != 1, one more for multiple.
        return _convert(value, (self.nargs != 1) + bool(self.multiple))

    def process_value(self, ctx, value):
        """Given a value and context this runs the logic to convert the
        value as necessary.
        """
        # If the value we were given is None we do nothing.  This way
        # code that calls this can easily figure out if something was
        # not provided.  Otherwise it would be converted into an empty
        # tuple for multiple invocations which is inconvenient.
        if value is not None:
            return self.type_cast_value(ctx, value)

    def value_is_missing(self, value):
        """Return `True` when *value* counts as "not provided": `None`, or
        an empty tuple for multi-value parameters.
        """
        if value is None:
            return True
        if (self.nargs != 1 or self.multiple) and value == ():
            return True
        return False

    def full_process_value(self, ctx, value):
        """Convert *value*, fall back to the default when unset, and
        enforce ``required``.

        :raises MissingParameter: if the parameter is required and no
                                  value could be determined.
        """
        value = self.process_value(ctx, value)

        if value is None and not ctx.resilient_parsing:
            value = self.get_default(ctx)

        if self.required and self.value_is_missing(value):
            raise MissingParameter(ctx=ctx, param=self)

        return value

    def resolve_envvar_value(self, ctx):
        """Return the raw string from this parameter's environment
        variable(s), or `None` when unset.  For the scalar form an empty
        string counts as unset (see the 7.1 versionchanged note above).
        """
        if self.envvar is None:
            return
        if isinstance(self.envvar, (tuple, list)):
            # Initialize rv so an empty sequence of envvars falls through to
            # the final check instead of raising UnboundLocalError.
            rv = None
            for envvar in self.envvar:
                rv = os.environ.get(envvar)
                if rv is not None:
                    # NOTE(review): the list form returns "" verbatim while
                    # the scalar branch below ignores "" -- confirm intended.
                    return rv
        else:
            rv = os.environ.get(self.envvar)

        if rv != "":
            return rv

    def value_from_envvar(self, ctx):
        """Like :meth:`resolve_envvar_value` but splits the value for
        multi-value parameters.
        """
        rv = self.resolve_envvar_value(ctx)
        if rv is not None and self.nargs != 1:
            rv = self.type.split_envvar_value(rv)
        return rv

    def handle_parse_result(self, ctx, opts, args):
        """Consume, convert, and store this parameter's value during
        argument parsing; returns ``(value, remaining_args)``.
        """
        with augment_usage_errors(ctx, param=self):
            value = self.consume_value(ctx, opts)
            try:
                value = self.full_process_value(ctx, value)
            except Exception:
                # In resilient mode (e.g. shell completion) conversion
                # errors are swallowed rather than raised.
                if not ctx.resilient_parsing:
                    raise
                value = None
            if self.callback is not None:
                try:
                    value = invoke_param_callback(self.callback, ctx, self, value)
                except Exception:
                    if not ctx.resilient_parsing:
                        raise

        if self.expose_value:
            ctx.params[self.name] = value
        return value, args

    def get_help_record(self, ctx):
        """Return the ``(term, help)`` tuple for the help page, or `None`.
        The base class contributes nothing; subclasses override this.
        """
        pass

    def get_usage_pieces(self, ctx):
        """Return the strings this parameter contributes to the usage line."""
        return []

    def get_error_hint(self, ctx):
        """Get a stringified version of the param for use in error messages to
        indicate which param caused the error.
        """
        hint_list = self.opts or [self.human_readable_name]
        return " / ".join(repr(x) for x in hint_list)
+
+
+class Option(Parameter):
+ """Options are usually optional values on the command line and
+ have some extra features that arguments don't have.
+
+ All other parameters are passed onwards to the parameter constructor.
+
+ :param show_default: controls if the default value should be shown on the
+ help page. Normally, defaults are not shown. If this
+ value is a string, it shows the string instead of the
+ value. This is particularly useful for dynamic options.
+ :param show_envvar: controls if an environment variable should be shown on
+ the help page. Normally, environment variables
+ are not shown.
+ :param prompt: if set to `True` or a non empty string then the user will be
+ prompted for input. If set to `True` the prompt will be the
+ option name capitalized.
+ :param confirmation_prompt: if set then the value will need to be confirmed
+ if it was prompted for.
+ :param hide_input: if this is `True` then the input on the prompt will be
+ hidden from the user. This is useful for password
+ input.
+ :param is_flag: forces this option to act as a flag. The default is
+ auto detection.
+ :param flag_value: which value should be used for this flag if it's
+ enabled. This is set to a boolean automatically if
+ the option string contains a slash to mark two options.
+ :param multiple: if this is set to `True` then the argument is accepted
+ multiple times and recorded. This is similar to ``nargs``
+ in how it works but supports arbitrary number of
+ arguments.
+ :param count: this flag makes an option increment an integer.
+ :param allow_from_autoenv: if this is enabled then the value of this
+ parameter will be pulled from an environment
+ variable in case a prefix is defined on the
+ context.
+ :param help: the help string.
+ :param hidden: hide this option from help outputs.
+ """
+
+ param_type_name = "option"
+
    def __init__(
        self,
        param_decls=None,
        show_default=False,
        prompt=False,
        confirmation_prompt=False,
        hide_input=False,
        is_flag=None,
        flag_value=None,
        multiple=False,
        count=False,
        allow_from_autoenv=True,
        type=None,
        help=None,
        hidden=False,
        show_choices=True,
        show_envvar=False,
        **attrs
    ):
        # Remember whether the caller supplied an explicit default before the
        # base class stores it; the flag/count logic below depends on this.
        default_is_missing = attrs.get("default", _missing) is _missing
        Parameter.__init__(self, param_decls, type=type, **attrs)

        if prompt is True:
            # Derive the prompt text from the option name,
            # e.g. "user_name" -> "User name".
            prompt_text = self.name.replace("_", " ").capitalize()
        elif prompt is False:
            prompt_text = None
        else:
            prompt_text = prompt
        self.prompt = prompt_text
        self.confirmation_prompt = confirmation_prompt
        self.hide_input = hide_input
        self.hidden = hidden

        # Flags
        if is_flag is None:
            if flag_value is not None:
                # Implicit flag: an explicit flag value was given.
                is_flag = True
            else:
                # Implicit flag: secondary names (e.g. --on/--off) exist.
                is_flag = bool(self.secondary_opts)
        if is_flag and default_is_missing:
            self.default = False
        if flag_value is None:
            flag_value = not self.default
        self.is_flag = is_flag
        self.flag_value = flag_value
        if self.is_flag and isinstance(self.flag_value, bool) and type in [None, bool]:
            self.type = BOOL
            self.is_bool_flag = True
        else:
            self.is_bool_flag = False

        # Counting
        self.count = count
        if count:
            if type is None:
                # Counting options are non-negative integers by default.
                self.type = IntRange(min=0)
            if default_is_missing:
                self.default = 0

        self.multiple = multiple
        self.allow_from_autoenv = allow_from_autoenv
        self.help = help
        self.show_default = show_default
        self.show_choices = show_choices
        self.show_envvar = show_envvar

        # Sanity check for stuff we don't support
        if __debug__:
            if self.nargs < 0:
                raise TypeError("Options cannot have nargs < 0")
            if self.prompt and self.is_flag and not self.is_bool_flag:
                raise TypeError("Cannot prompt for flags that are not bools.")
            if not self.is_bool_flag and self.secondary_opts:
                raise TypeError("Got secondary option for non boolean flag.")
            if self.is_bool_flag and self.hide_input and self.prompt is not None:
                raise TypeError("Hidden input does not work with boolean flag prompts.")
            if self.count:
                if self.multiple:
                    raise TypeError(
                        "Options cannot be multiple and count at the same time."
                    )
                elif self.is_flag:
                    raise TypeError(
                        "Options cannot be count and flags at the same time."
                    )
+
+ def _parse_decls(self, decls, expose_value):
+ opts = []
+ secondary_opts = []
+ name = None
+ possible_names = []
+
+ for decl in decls:
+ if isidentifier(decl):
+ if name is not None:
+ raise TypeError("Name defined twice")
+ name = decl
+ else:
+ split_char = ";" if decl[:1] == "/" else "/"
+ if split_char in decl:
+ first, second = decl.split(split_char, 1)
+ first = first.rstrip()
+ if first:
+ possible_names.append(split_opt(first))
+ opts.append(first)
+ second = second.lstrip()
+ if second:
+ secondary_opts.append(second.lstrip())
+ else:
+ possible_names.append(split_opt(decl))
+ opts.append(decl)
+
+ if name is None and possible_names:
+ possible_names.sort(key=lambda x: -len(x[0])) # group long options first
+ name = possible_names[0][1].replace("-", "_").lower()
+ if not isidentifier(name):
+ name = None
+
+ if name is None:
+ if not expose_value:
+ return None, opts, secondary_opts
+ raise TypeError("Could not determine name for option")
+
+ if not opts and not secondary_opts:
+ raise TypeError(
+ "No options defined but a name was passed ({}). Did you"
+ " mean to declare an argument instead of an option?".format(name)
+ )
+
+ return name, opts, secondary_opts
+
+ def add_to_parser(self, parser, ctx):
+ kwargs = {
+ "dest": self.name,
+ "nargs": self.nargs,
+ "obj": self,
+ }
+
+ if self.multiple:
+ action = "append"
+ elif self.count:
+ action = "count"
+ else:
+ action = "store"
+
+ if self.is_flag:
+ kwargs.pop("nargs", None)
+ action_const = "{}_const".format(action)
+ if self.is_bool_flag and self.secondary_opts:
+ parser.add_option(self.opts, action=action_const, const=True, **kwargs)
+ parser.add_option(
+ self.secondary_opts, action=action_const, const=False, **kwargs
+ )
+ else:
+ parser.add_option(
+ self.opts, action=action_const, const=self.flag_value, **kwargs
+ )
+ else:
+ kwargs["action"] = action
+ parser.add_option(self.opts, **kwargs)
+
+ def get_help_record(self, ctx):
+ if self.hidden:
+ return
+ any_prefix_is_slash = []
+
+ def _write_opts(opts):
+ rv, any_slashes = join_options(opts)
+ if any_slashes:
+ any_prefix_is_slash[:] = [True]
+ if not self.is_flag and not self.count:
+ rv += " {}".format(self.make_metavar())
+ return rv
+
+ rv = [_write_opts(self.opts)]
+ if self.secondary_opts:
+ rv.append(_write_opts(self.secondary_opts))
+
+ help = self.help or ""
+ extra = []
+ if self.show_envvar:
+ envvar = self.envvar
+ if envvar is None:
+ if self.allow_from_autoenv and ctx.auto_envvar_prefix is not None:
+ envvar = "{}_{}".format(ctx.auto_envvar_prefix, self.name.upper())
+ if envvar is not None:
+ extra.append(
+ "env var: {}".format(
+ ", ".join(str(d) for d in envvar)
+ if isinstance(envvar, (list, tuple))
+ else envvar
+ )
+ )
+ if self.default is not None and (self.show_default or ctx.show_default):
+ if isinstance(self.show_default, string_types):
+ default_string = "({})".format(self.show_default)
+ elif isinstance(self.default, (list, tuple)):
+ default_string = ", ".join(str(d) for d in self.default)
+ elif inspect.isfunction(self.default):
+ default_string = "(dynamic)"
+ else:
+ default_string = self.default
+ extra.append("default: {}".format(default_string))
+
+ if self.required:
+ extra.append("required")
+ if extra:
+ help = "{}[{}]".format(
+ "{} ".format(help) if help else "", "; ".join(extra)
+ )
+
+ return ("; " if any_prefix_is_slash else " / ").join(rv), help
+
+ def get_default(self, ctx):
+ # If we're a non boolean flag our default is more complex because
+ # we need to look at all flags in the same group to figure out
+        # if we're the default one, in which case we return the flag
+ # value as default.
+ if self.is_flag and not self.is_bool_flag:
+ for param in ctx.command.params:
+ if param.name == self.name and param.default:
+ return param.flag_value
+ return None
+ return Parameter.get_default(self, ctx)
+
+ def prompt_for_value(self, ctx):
+ """This is an alternative flow that can be activated in the full
+ value processing if a value does not exist. It will prompt the
+ user until a valid value exists and then returns the processed
+ value as result.
+ """
+ # Calculate the default before prompting anything to be stable.
+ default = self.get_default(ctx)
+
+ # If this is a prompt for a flag we need to handle this
+ # differently.
+ if self.is_bool_flag:
+ return confirm(self.prompt, default)
+
+ return prompt(
+ self.prompt,
+ default=default,
+ type=self.type,
+ hide_input=self.hide_input,
+ show_choices=self.show_choices,
+ confirmation_prompt=self.confirmation_prompt,
+ value_proc=lambda x: self.process_value(ctx, x),
+ )
+
+ def resolve_envvar_value(self, ctx):
+ rv = Parameter.resolve_envvar_value(self, ctx)
+ if rv is not None:
+ return rv
+ if self.allow_from_autoenv and ctx.auto_envvar_prefix is not None:
+ envvar = "{}_{}".format(ctx.auto_envvar_prefix, self.name.upper())
+ return os.environ.get(envvar)
+
+ def value_from_envvar(self, ctx):
+ rv = self.resolve_envvar_value(ctx)
+ if rv is None:
+ return None
+ value_depth = (self.nargs != 1) + bool(self.multiple)
+ if value_depth > 0 and rv is not None:
+ rv = self.type.split_envvar_value(rv)
+ if self.multiple and self.nargs != 1:
+ rv = batch(rv, self.nargs)
+ return rv
+
+ def full_process_value(self, ctx, value):
+ if value is None and self.prompt is not None and not ctx.resilient_parsing:
+ return self.prompt_for_value(ctx)
+ return Parameter.full_process_value(self, ctx, value)
+
+
+class Argument(Parameter):
+ """Arguments are positional parameters to a command. They generally
+ provide fewer features than options but can have infinite ``nargs``
+ and are required by default.
+
+ All parameters are passed onwards to the parameter constructor.
+ """
+
+ param_type_name = "argument"
+
+ def __init__(self, param_decls, required=None, **attrs):
+ if required is None:
+ if attrs.get("default") is not None:
+ required = False
+ else:
+ required = attrs.get("nargs", 1) > 0
+ Parameter.__init__(self, param_decls, required=required, **attrs)
+ if self.default is not None and self.nargs < 0:
+ raise TypeError(
+ "nargs=-1 in combination with a default value is not supported."
+ )
+
+ @property
+ def human_readable_name(self):
+ if self.metavar is not None:
+ return self.metavar
+ return self.name.upper()
+
+ def make_metavar(self):
+ if self.metavar is not None:
+ return self.metavar
+ var = self.type.get_metavar(self)
+ if not var:
+ var = self.name.upper()
+ if not self.required:
+ var = "[{}]".format(var)
+ if self.nargs != 1:
+ var += "..."
+ return var
+
+ def _parse_decls(self, decls, expose_value):
+ if not decls:
+ if not expose_value:
+ return None, [], []
+ raise TypeError("Could not determine name for argument")
+ if len(decls) == 1:
+ name = arg = decls[0]
+ name = name.replace("-", "_").lower()
+ else:
+ raise TypeError(
+ "Arguments take exactly one parameter declaration, got"
+ " {}".format(len(decls))
+ )
+ return name, [arg], []
+
+ def get_usage_pieces(self, ctx):
+ return [self.make_metavar()]
+
+ def get_error_hint(self, ctx):
+ return repr(self.make_metavar())
+
+ def add_to_parser(self, parser, ctx):
+ parser.add_argument(dest=self.name, nargs=self.nargs, obj=self)
diff --git a/matteo_env/Lib/site-packages/click/decorators.py b/matteo_env/Lib/site-packages/click/decorators.py
new file mode 100644
index 0000000..c7b5af6
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/decorators.py
@@ -0,0 +1,333 @@
+import inspect
+import sys
+from functools import update_wrapper
+
+from ._compat import iteritems
+from ._unicodefun import _check_for_unicode_literals
+from .core import Argument
+from .core import Command
+from .core import Group
+from .core import Option
+from .globals import get_current_context
+from .utils import echo
+
+
+def pass_context(f):
+ """Marks a callback as wanting to receive the current context
+ object as first argument.
+ """
+
+ def new_func(*args, **kwargs):
+ return f(get_current_context(), *args, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+
+def pass_obj(f):
+ """Similar to :func:`pass_context`, but only pass the object on the
+ context onwards (:attr:`Context.obj`). This is useful if that object
+ represents the state of a nested system.
+ """
+
+ def new_func(*args, **kwargs):
+ return f(get_current_context().obj, *args, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+
+def make_pass_decorator(object_type, ensure=False):
+ """Given an object type this creates a decorator that will work
+ similar to :func:`pass_obj` but instead of passing the object of the
+ current context, it will find the innermost context of type
+ :func:`object_type`.
+
+ This generates a decorator that works roughly like this::
+
+ from functools import update_wrapper
+
+ def decorator(f):
+ @pass_context
+ def new_func(ctx, *args, **kwargs):
+ obj = ctx.find_object(object_type)
+ return ctx.invoke(f, obj, *args, **kwargs)
+ return update_wrapper(new_func, f)
+ return decorator
+
+ :param object_type: the type of the object to pass.
+ :param ensure: if set to `True`, a new object will be created and
+ remembered on the context if it's not there yet.
+ """
+
+ def decorator(f):
+ def new_func(*args, **kwargs):
+ ctx = get_current_context()
+ if ensure:
+ obj = ctx.ensure_object(object_type)
+ else:
+ obj = ctx.find_object(object_type)
+ if obj is None:
+ raise RuntimeError(
+ "Managed to invoke callback without a context"
+ " object of type '{}' existing".format(object_type.__name__)
+ )
+ return ctx.invoke(f, obj, *args, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+ return decorator
+
+
+def _make_command(f, name, attrs, cls):
+ if isinstance(f, Command):
+ raise TypeError("Attempted to convert a callback into a command twice.")
+ try:
+ params = f.__click_params__
+ params.reverse()
+ del f.__click_params__
+ except AttributeError:
+ params = []
+ help = attrs.get("help")
+ if help is None:
+ help = inspect.getdoc(f)
+ if isinstance(help, bytes):
+ help = help.decode("utf-8")
+ else:
+ help = inspect.cleandoc(help)
+ attrs["help"] = help
+ _check_for_unicode_literals()
+ return cls(
+ name=name or f.__name__.lower().replace("_", "-"),
+ callback=f,
+ params=params,
+ **attrs
+ )
+
+
+def command(name=None, cls=None, **attrs):
+ r"""Creates a new :class:`Command` and uses the decorated function as
+ callback. This will also automatically attach all decorated
+ :func:`option`\s and :func:`argument`\s as parameters to the command.
+
+ The name of the command defaults to the name of the function with
+ underscores replaced by dashes. If you want to change that, you can
+ pass the intended name as the first argument.
+
+ All keyword arguments are forwarded to the underlying command class.
+
+ Once decorated the function turns into a :class:`Command` instance
+ that can be invoked as a command line utility or be attached to a
+ command :class:`Group`.
+
+ :param name: the name of the command. This defaults to the function
+ name with underscores replaced by dashes.
+ :param cls: the command class to instantiate. This defaults to
+ :class:`Command`.
+ """
+ if cls is None:
+ cls = Command
+
+ def decorator(f):
+ cmd = _make_command(f, name, attrs, cls)
+ cmd.__doc__ = f.__doc__
+ return cmd
+
+ return decorator
+
+
+def group(name=None, **attrs):
+ """Creates a new :class:`Group` with a function as callback. This
+ works otherwise the same as :func:`command` just that the `cls`
+ parameter is set to :class:`Group`.
+ """
+ attrs.setdefault("cls", Group)
+ return command(name, **attrs)
+
+
+def _param_memo(f, param):
+ if isinstance(f, Command):
+ f.params.append(param)
+ else:
+ if not hasattr(f, "__click_params__"):
+ f.__click_params__ = []
+ f.__click_params__.append(param)
+
+
+def argument(*param_decls, **attrs):
+ """Attaches an argument to the command. All positional arguments are
+ passed as parameter declarations to :class:`Argument`; all keyword
+ arguments are forwarded unchanged (except ``cls``).
+ This is equivalent to creating an :class:`Argument` instance manually
+ and attaching it to the :attr:`Command.params` list.
+
+ :param cls: the argument class to instantiate. This defaults to
+ :class:`Argument`.
+ """
+
+ def decorator(f):
+ ArgumentClass = attrs.pop("cls", Argument)
+ _param_memo(f, ArgumentClass(param_decls, **attrs))
+ return f
+
+ return decorator
+
+
+def option(*param_decls, **attrs):
+ """Attaches an option to the command. All positional arguments are
+ passed as parameter declarations to :class:`Option`; all keyword
+ arguments are forwarded unchanged (except ``cls``).
+ This is equivalent to creating an :class:`Option` instance manually
+ and attaching it to the :attr:`Command.params` list.
+
+ :param cls: the option class to instantiate. This defaults to
+ :class:`Option`.
+ """
+
+ def decorator(f):
+ # Issue 926, copy attrs, so pre-defined options can re-use the same cls=
+ option_attrs = attrs.copy()
+
+ if "help" in option_attrs:
+ option_attrs["help"] = inspect.cleandoc(option_attrs["help"])
+ OptionClass = option_attrs.pop("cls", Option)
+ _param_memo(f, OptionClass(param_decls, **option_attrs))
+ return f
+
+ return decorator
+
+
+def confirmation_option(*param_decls, **attrs):
+ """Shortcut for confirmation prompts that can be ignored by passing
+ ``--yes`` as parameter.
+
+ This is equivalent to decorating a function with :func:`option` with
+ the following parameters::
+
+ def callback(ctx, param, value):
+ if not value:
+ ctx.abort()
+
+ @click.command()
+ @click.option('--yes', is_flag=True, callback=callback,
+ expose_value=False, prompt='Do you want to continue?')
+ def dropdb():
+ pass
+ """
+
+ def decorator(f):
+ def callback(ctx, param, value):
+ if not value:
+ ctx.abort()
+
+ attrs.setdefault("is_flag", True)
+ attrs.setdefault("callback", callback)
+ attrs.setdefault("expose_value", False)
+ attrs.setdefault("prompt", "Do you want to continue?")
+ attrs.setdefault("help", "Confirm the action without prompting.")
+ return option(*(param_decls or ("--yes",)), **attrs)(f)
+
+ return decorator
+
+
+def password_option(*param_decls, **attrs):
+ """Shortcut for password prompts.
+
+ This is equivalent to decorating a function with :func:`option` with
+ the following parameters::
+
+ @click.command()
+ @click.option('--password', prompt=True, confirmation_prompt=True,
+ hide_input=True)
+ def changeadmin(password):
+ pass
+ """
+
+ def decorator(f):
+ attrs.setdefault("prompt", True)
+ attrs.setdefault("confirmation_prompt", True)
+ attrs.setdefault("hide_input", True)
+ return option(*(param_decls or ("--password",)), **attrs)(f)
+
+ return decorator
+
+
+def version_option(version=None, *param_decls, **attrs):
+ """Adds a ``--version`` option which immediately ends the program
+ printing out the version number. This is implemented as an eager
+ option that prints the version and exits the program in the callback.
+
+ :param version: the version number to show. If not provided Click
+ attempts an auto discovery via setuptools.
+ :param prog_name: the name of the program (defaults to autodetection)
+ :param message: custom message to show instead of the default
+ (``'%(prog)s, version %(version)s'``)
+ :param others: everything else is forwarded to :func:`option`.
+ """
+ if version is None:
+ if hasattr(sys, "_getframe"):
+ module = sys._getframe(1).f_globals.get("__name__")
+ else:
+ module = ""
+
+ def decorator(f):
+ prog_name = attrs.pop("prog_name", None)
+ message = attrs.pop("message", "%(prog)s, version %(version)s")
+
+ def callback(ctx, param, value):
+ if not value or ctx.resilient_parsing:
+ return
+ prog = prog_name
+ if prog is None:
+ prog = ctx.find_root().info_name
+ ver = version
+ if ver is None:
+ try:
+ import pkg_resources
+ except ImportError:
+ pass
+ else:
+ for dist in pkg_resources.working_set:
+ scripts = dist.get_entry_map().get("console_scripts") or {}
+ for _, entry_point in iteritems(scripts):
+ if entry_point.module_name == module:
+ ver = dist.version
+ break
+ if ver is None:
+ raise RuntimeError("Could not determine version")
+ echo(message % {"prog": prog, "version": ver}, color=ctx.color)
+ ctx.exit()
+
+ attrs.setdefault("is_flag", True)
+ attrs.setdefault("expose_value", False)
+ attrs.setdefault("is_eager", True)
+ attrs.setdefault("help", "Show the version and exit.")
+ attrs["callback"] = callback
+ return option(*(param_decls or ("--version",)), **attrs)(f)
+
+ return decorator
+
+
+def help_option(*param_decls, **attrs):
+ """Adds a ``--help`` option which immediately ends the program
+ printing out the help page. This is usually unnecessary to add as
+ this is added by default to all commands unless suppressed.
+
+ Like :func:`version_option`, this is implemented as eager option that
+ prints in the callback and exits.
+
+ All arguments are forwarded to :func:`option`.
+ """
+
+ def decorator(f):
+ def callback(ctx, param, value):
+ if value and not ctx.resilient_parsing:
+ echo(ctx.get_help(), color=ctx.color)
+ ctx.exit()
+
+ attrs.setdefault("is_flag", True)
+ attrs.setdefault("expose_value", False)
+ attrs.setdefault("help", "Show this message and exit.")
+ attrs.setdefault("is_eager", True)
+ attrs["callback"] = callback
+ return option(*(param_decls or ("--help",)), **attrs)(f)
+
+ return decorator
diff --git a/matteo_env/Lib/site-packages/click/exceptions.py b/matteo_env/Lib/site-packages/click/exceptions.py
new file mode 100644
index 0000000..592ee38
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/exceptions.py
@@ -0,0 +1,253 @@
+from ._compat import filename_to_ui
+from ._compat import get_text_stderr
+from ._compat import PY2
+from .utils import echo
+
+
+def _join_param_hints(param_hint):
+ if isinstance(param_hint, (tuple, list)):
+ return " / ".join(repr(x) for x in param_hint)
+ return param_hint
+
+
+class ClickException(Exception):
+ """An exception that Click can handle and show to the user."""
+
+ #: The exit code for this exception
+ exit_code = 1
+
+ def __init__(self, message):
+ ctor_msg = message
+ if PY2:
+ if ctor_msg is not None:
+ ctor_msg = ctor_msg.encode("utf-8")
+ Exception.__init__(self, ctor_msg)
+ self.message = message
+
+ def format_message(self):
+ return self.message
+
+ def __str__(self):
+ return self.message
+
+ if PY2:
+ __unicode__ = __str__
+
+ def __str__(self):
+ return self.message.encode("utf-8")
+
+ def show(self, file=None):
+ if file is None:
+ file = get_text_stderr()
+ echo("Error: {}".format(self.format_message()), file=file)
+
+
+class UsageError(ClickException):
+ """An internal exception that signals a usage error. This typically
+ aborts any further handling.
+
+ :param message: the error message to display.
+ :param ctx: optionally the context that caused this error. Click will
+ fill in the context automatically in some situations.
+ """
+
+ exit_code = 2
+
+ def __init__(self, message, ctx=None):
+ ClickException.__init__(self, message)
+ self.ctx = ctx
+ self.cmd = self.ctx.command if self.ctx else None
+
+ def show(self, file=None):
+ if file is None:
+ file = get_text_stderr()
+ color = None
+ hint = ""
+ if self.cmd is not None and self.cmd.get_help_option(self.ctx) is not None:
+ hint = "Try '{} {}' for help.\n".format(
+ self.ctx.command_path, self.ctx.help_option_names[0]
+ )
+ if self.ctx is not None:
+ color = self.ctx.color
+ echo("{}\n{}".format(self.ctx.get_usage(), hint), file=file, color=color)
+ echo("Error: {}".format(self.format_message()), file=file, color=color)
+
+
+class BadParameter(UsageError):
+ """An exception that formats out a standardized error message for a
+ bad parameter. This is useful when thrown from a callback or type as
+ Click will attach contextual information to it (for instance, which
+ parameter it is).
+
+ .. versionadded:: 2.0
+
+ :param param: the parameter object that caused this error. This can
+ be left out, and Click will attach this info itself
+ if possible.
+ :param param_hint: a string that shows up as parameter name. This
+ can be used as alternative to `param` in cases
+ where custom validation should happen. If it is
+ a string it's used as such, if it's a list then
+ each item is quoted and separated.
+ """
+
+ def __init__(self, message, ctx=None, param=None, param_hint=None):
+ UsageError.__init__(self, message, ctx)
+ self.param = param
+ self.param_hint = param_hint
+
+ def format_message(self):
+ if self.param_hint is not None:
+ param_hint = self.param_hint
+ elif self.param is not None:
+ param_hint = self.param.get_error_hint(self.ctx)
+ else:
+ return "Invalid value: {}".format(self.message)
+ param_hint = _join_param_hints(param_hint)
+
+ return "Invalid value for {}: {}".format(param_hint, self.message)
+
+
+class MissingParameter(BadParameter):
+ """Raised if click required an option or argument but it was not
+ provided when invoking the script.
+
+ .. versionadded:: 4.0
+
+ :param param_type: a string that indicates the type of the parameter.
+ The default is to inherit the parameter type from
+ the given `param`. Valid values are ``'parameter'``,
+ ``'option'`` or ``'argument'``.
+ """
+
+ def __init__(
+ self, message=None, ctx=None, param=None, param_hint=None, param_type=None
+ ):
+ BadParameter.__init__(self, message, ctx, param, param_hint)
+ self.param_type = param_type
+
+ def format_message(self):
+ if self.param_hint is not None:
+ param_hint = self.param_hint
+ elif self.param is not None:
+ param_hint = self.param.get_error_hint(self.ctx)
+ else:
+ param_hint = None
+ param_hint = _join_param_hints(param_hint)
+
+ param_type = self.param_type
+ if param_type is None and self.param is not None:
+ param_type = self.param.param_type_name
+
+ msg = self.message
+ if self.param is not None:
+ msg_extra = self.param.type.get_missing_message(self.param)
+ if msg_extra:
+ if msg:
+ msg += ". {}".format(msg_extra)
+ else:
+ msg = msg_extra
+
+ return "Missing {}{}{}{}".format(
+ param_type,
+ " {}".format(param_hint) if param_hint else "",
+ ". " if msg else ".",
+ msg or "",
+ )
+
+ def __str__(self):
+ if self.message is None:
+ param_name = self.param.name if self.param else None
+ return "missing parameter: {}".format(param_name)
+ else:
+ return self.message
+
+ if PY2:
+ __unicode__ = __str__
+
+ def __str__(self):
+ return self.__unicode__().encode("utf-8")
+
+
+class NoSuchOption(UsageError):
+ """Raised if click attempted to handle an option that does not
+ exist.
+
+ .. versionadded:: 4.0
+ """
+
+ def __init__(self, option_name, message=None, possibilities=None, ctx=None):
+ if message is None:
+ message = "no such option: {}".format(option_name)
+ UsageError.__init__(self, message, ctx)
+ self.option_name = option_name
+ self.possibilities = possibilities
+
+ def format_message(self):
+ bits = [self.message]
+ if self.possibilities:
+ if len(self.possibilities) == 1:
+ bits.append("Did you mean {}?".format(self.possibilities[0]))
+ else:
+ possibilities = sorted(self.possibilities)
+ bits.append("(Possible options: {})".format(", ".join(possibilities)))
+ return " ".join(bits)
+
+
+class BadOptionUsage(UsageError):
+ """Raised if an option is generally supplied but the use of the option
+ was incorrect. This is for instance raised if the number of arguments
+ for an option is not correct.
+
+ .. versionadded:: 4.0
+
+ :param option_name: the name of the option being used incorrectly.
+ """
+
+ def __init__(self, option_name, message, ctx=None):
+ UsageError.__init__(self, message, ctx)
+ self.option_name = option_name
+
+
+class BadArgumentUsage(UsageError):
+ """Raised if an argument is generally supplied but the use of the argument
+ was incorrect. This is for instance raised if the number of values
+ for an argument is not correct.
+
+ .. versionadded:: 6.0
+ """
+
+ def __init__(self, message, ctx=None):
+ UsageError.__init__(self, message, ctx)
+
+
+class FileError(ClickException):
+ """Raised if a file cannot be opened."""
+
+ def __init__(self, filename, hint=None):
+ ui_filename = filename_to_ui(filename)
+ if hint is None:
+ hint = "unknown error"
+ ClickException.__init__(self, hint)
+ self.ui_filename = ui_filename
+ self.filename = filename
+
+ def format_message(self):
+ return "Could not open file {}: {}".format(self.ui_filename, self.message)
+
+
+class Abort(RuntimeError):
+ """An internal signalling exception that signals Click to abort."""
+
+
+class Exit(RuntimeError):
+ """An exception that indicates that the application should exit with some
+ status code.
+
+ :param code: the status code to exit with.
+ """
+
+ __slots__ = ("exit_code",)
+
+ def __init__(self, code=0):
+ self.exit_code = code
diff --git a/matteo_env/Lib/site-packages/click/formatting.py b/matteo_env/Lib/site-packages/click/formatting.py
new file mode 100644
index 0000000..319c7f6
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/formatting.py
@@ -0,0 +1,283 @@
+from contextlib import contextmanager
+
+from ._compat import term_len
+from .parser import split_opt
+from .termui import get_terminal_size
+
+# Can force a width. This is used by the test system
+FORCED_WIDTH = None
+
+
+def measure_table(rows):
+ widths = {}
+ for row in rows:
+ for idx, col in enumerate(row):
+ widths[idx] = max(widths.get(idx, 0), term_len(col))
+ return tuple(y for x, y in sorted(widths.items()))
+
+
+def iter_rows(rows, col_count):
+ for row in rows:
+ row = tuple(row)
+ yield row + ("",) * (col_count - len(row))
+
+
+def wrap_text(
+ text, width=78, initial_indent="", subsequent_indent="", preserve_paragraphs=False
+):
+ """A helper function that intelligently wraps text. By default, it
+ assumes that it operates on a single paragraph of text but if the
+ `preserve_paragraphs` parameter is provided it will intelligently
+ handle paragraphs (defined by two empty lines).
+
+ If paragraphs are handled, a paragraph can be prefixed with an empty
+ line containing the ``\\b`` character (``\\x08``) to indicate that
+ no rewrapping should happen in that block.
+
+ :param text: the text that should be rewrapped.
+ :param width: the maximum width for the text.
+ :param initial_indent: the initial indent that should be placed on the
+ first line as a string.
+ :param subsequent_indent: the indent string that should be placed on
+ each consecutive line.
+ :param preserve_paragraphs: if this flag is set then the wrapping will
+ intelligently handle paragraphs.
+ """
+ from ._textwrap import TextWrapper
+
+ text = text.expandtabs()
+ wrapper = TextWrapper(
+ width,
+ initial_indent=initial_indent,
+ subsequent_indent=subsequent_indent,
+ replace_whitespace=False,
+ )
+ if not preserve_paragraphs:
+ return wrapper.fill(text)
+
+ p = []
+ buf = []
+ indent = None
+
+ def _flush_par():
+ if not buf:
+ return
+ if buf[0].strip() == "\b":
+ p.append((indent or 0, True, "\n".join(buf[1:])))
+ else:
+ p.append((indent or 0, False, " ".join(buf)))
+ del buf[:]
+
+ for line in text.splitlines():
+ if not line:
+ _flush_par()
+ indent = None
+ else:
+ if indent is None:
+ orig_len = term_len(line)
+ line = line.lstrip()
+ indent = orig_len - term_len(line)
+ buf.append(line)
+ _flush_par()
+
+ rv = []
+ for indent, raw, text in p:
+ with wrapper.extra_indent(" " * indent):
+ if raw:
+ rv.append(wrapper.indent_only(text))
+ else:
+ rv.append(wrapper.fill(text))
+
+ return "\n\n".join(rv)
+
+
+class HelpFormatter(object):
+ """This class helps with formatting text-based help pages. It's
+ usually just needed for very special internal cases, but it's also
+ exposed so that developers can write their own fancy outputs.
+
+ At present, it always writes into memory.
+
+ :param indent_increment: the additional increment for each level.
+ :param width: the width for the text. This defaults to the terminal
+ width clamped to a maximum of 78.
+ """
+
+ def __init__(self, indent_increment=2, width=None, max_width=None):
+ self.indent_increment = indent_increment
+ if max_width is None:
+ max_width = 80
+ if width is None:
+ width = FORCED_WIDTH
+ if width is None:
+ width = max(min(get_terminal_size()[0], max_width) - 2, 50)
+ self.width = width
+ self.current_indent = 0
+ self.buffer = []
+
+ def write(self, string):
+ """Writes a unicode string into the internal buffer."""
+ self.buffer.append(string)
+
+ def indent(self):
+ """Increases the indentation."""
+ self.current_indent += self.indent_increment
+
+ def dedent(self):
+ """Decreases the indentation."""
+ self.current_indent -= self.indent_increment
+
+ def write_usage(self, prog, args="", prefix="Usage: "):
+ """Writes a usage line into the buffer.
+
+ :param prog: the program name.
+ :param args: whitespace separated list of arguments.
+ :param prefix: the prefix for the first line.
+ """
+ usage_prefix = "{:>{w}}{} ".format(prefix, prog, w=self.current_indent)
+ text_width = self.width - self.current_indent
+
+ if text_width >= (term_len(usage_prefix) + 20):
+ # The arguments will fit to the right of the prefix.
+ indent = " " * term_len(usage_prefix)
+ self.write(
+ wrap_text(
+ args,
+ text_width,
+ initial_indent=usage_prefix,
+ subsequent_indent=indent,
+ )
+ )
+ else:
+ # The prefix is too long, put the arguments on the next line.
+ self.write(usage_prefix)
+ self.write("\n")
+ indent = " " * (max(self.current_indent, term_len(prefix)) + 4)
+ self.write(
+ wrap_text(
+ args, text_width, initial_indent=indent, subsequent_indent=indent
+ )
+ )
+
+ self.write("\n")
+
+ def write_heading(self, heading):
+ """Writes a heading into the buffer."""
+ self.write("{:>{w}}{}:\n".format("", heading, w=self.current_indent))
+
+ def write_paragraph(self):
+ """Writes a paragraph into the buffer."""
+ if self.buffer:
+ self.write("\n")
+
+ def write_text(self, text):
+ """Writes re-indented text into the buffer. This rewraps and
+ preserves paragraphs.
+ """
+ text_width = max(self.width - self.current_indent, 11)
+ indent = " " * self.current_indent
+ self.write(
+ wrap_text(
+ text,
+ text_width,
+ initial_indent=indent,
+ subsequent_indent=indent,
+ preserve_paragraphs=True,
+ )
+ )
+ self.write("\n")
+
+ def write_dl(self, rows, col_max=30, col_spacing=2):
+ """Writes a definition list into the buffer. This is how options
+ and commands are usually formatted.
+
+ :param rows: a list of two item tuples for the terms and values.
+ :param col_max: the maximum width of the first column.
+ :param col_spacing: the number of spaces between the first and
+ second column.
+ """
+ rows = list(rows)
+ widths = measure_table(rows)
+ if len(widths) != 2:
+ raise TypeError("Expected two columns for definition list")
+
+ first_col = min(widths[0], col_max) + col_spacing
+
+ for first, second in iter_rows(rows, len(widths)):
+ self.write("{:>{w}}{}".format("", first, w=self.current_indent))
+ if not second:
+ self.write("\n")
+ continue
+ if term_len(first) <= first_col - col_spacing:
+ self.write(" " * (first_col - term_len(first)))
+ else:
+ self.write("\n")
+ self.write(" " * (first_col + self.current_indent))
+
+ text_width = max(self.width - first_col - 2, 10)
+ wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True)
+ lines = wrapped_text.splitlines()
+
+ if lines:
+ self.write("{}\n".format(lines[0]))
+
+ for line in lines[1:]:
+ self.write(
+ "{:>{w}}{}\n".format(
+ "", line, w=first_col + self.current_indent
+ )
+ )
+
+ if len(lines) > 1:
+ # separate long help from next option
+ self.write("\n")
+ else:
+ self.write("\n")
+
+ @contextmanager
+ def section(self, name):
+ """Helpful context manager that writes a paragraph, a heading,
+ and the indents.
+
+ :param name: the section name that is written as heading.
+ """
+ self.write_paragraph()
+ self.write_heading(name)
+ self.indent()
+ try:
+ yield
+ finally:
+ self.dedent()
+
+ @contextmanager
+ def indentation(self):
+ """A context manager that increases the indentation."""
+ self.indent()
+ try:
+ yield
+ finally:
+ self.dedent()
+
+ def getvalue(self):
+ """Returns the buffer contents."""
+ return "".join(self.buffer)
+
+
+def join_options(options):
+ """Given a list of option strings this joins them in the most appropriate
+ way and returns them in the form ``(formatted_string,
+ any_prefix_is_slash)`` where the second item in the tuple is a flag that
+ indicates if any of the option prefixes was a slash.
+ """
+ rv = []
+ any_prefix_is_slash = False
+ for opt in options:
+ prefix = split_opt(opt)[0]
+ if prefix == "/":
+ any_prefix_is_slash = True
+ rv.append((len(prefix), opt))
+
+ rv.sort(key=lambda x: x[0])
+
+ rv = ", ".join(x[1] for x in rv)
+ return rv, any_prefix_is_slash
diff --git a/matteo_env/Lib/site-packages/click/globals.py b/matteo_env/Lib/site-packages/click/globals.py
new file mode 100644
index 0000000..1649f9a
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/globals.py
@@ -0,0 +1,47 @@
+from threading import local
+
+_local = local()
+
+
+def get_current_context(silent=False):
+ """Returns the current click context. This can be used as a way to
+ access the current context object from anywhere. This is a more implicit
+ alternative to the :func:`pass_context` decorator. This function is
+ primarily useful for helpers such as :func:`echo` which might be
+ interested in changing its behavior based on the current context.
+
+ To push the current context, :meth:`Context.scope` can be used.
+
+ .. versionadded:: 5.0
+
+ :param silent: if set to `True` the return value is `None` if no context
+ is available. The default behavior is to raise a
+ :exc:`RuntimeError`.
+ """
+ try:
+ return _local.stack[-1]
+ except (AttributeError, IndexError):
+ if not silent:
+ raise RuntimeError("There is no active click context.")
+
+
+def push_context(ctx):
+ """Pushes a new context to the current stack."""
+ _local.__dict__.setdefault("stack", []).append(ctx)
+
+
+def pop_context():
+ """Removes the top level from the stack."""
+ _local.stack.pop()
+
+
+def resolve_color_default(color=None):
+    """Internal helper to get the default value of the color flag. If a
+ value is passed it's returned unchanged, otherwise it's looked up from
+ the current context.
+ """
+ if color is not None:
+ return color
+ ctx = get_current_context(silent=True)
+ if ctx is not None:
+ return ctx.color
diff --git a/matteo_env/Lib/site-packages/click/parser.py b/matteo_env/Lib/site-packages/click/parser.py
new file mode 100644
index 0000000..f43ebfe
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/parser.py
@@ -0,0 +1,428 @@
+# -*- coding: utf-8 -*-
+"""
+This module started out as largely a copy paste from the stdlib's
+optparse module with the features removed that we do not need from
+optparse because we implement them in Click on a higher level (for
+instance type handling, help formatting and a lot more).
+
+The plan is to remove more and more from here over time.
+
+The reason this is a different module and not optparse from the stdlib
+is that there are differences in 2.x and 3.x about the error messages
+generated and optparse in the stdlib uses gettext for no good reason
+and might cause us issues.
+
+Click uses parts of optparse written by Gregory P. Ward and maintained
+by the Python Software Foundation. This is limited to code in parser.py.
+
+Copyright 2001-2006 Gregory P. Ward. All rights reserved.
+Copyright 2002-2006 Python Software Foundation. All rights reserved.
+"""
+import re
+from collections import deque
+
+from .exceptions import BadArgumentUsage
+from .exceptions import BadOptionUsage
+from .exceptions import NoSuchOption
+from .exceptions import UsageError
+
+
+def _unpack_args(args, nargs_spec):
+ """Given an iterable of arguments and an iterable of nargs specifications,
+ it returns a tuple with all the unpacked arguments at the first index
+ and all remaining arguments as the second.
+
+ The nargs specification is the number of arguments that should be consumed
+ or `-1` to indicate that this position should eat up all the remainders.
+
+ Missing items are filled with `None`.
+ """
+ args = deque(args)
+ nargs_spec = deque(nargs_spec)
+ rv = []
+ spos = None
+
+ def _fetch(c):
+ try:
+ if spos is None:
+ return c.popleft()
+ else:
+ return c.pop()
+ except IndexError:
+ return None
+
+ while nargs_spec:
+ nargs = _fetch(nargs_spec)
+ if nargs == 1:
+ rv.append(_fetch(args))
+ elif nargs > 1:
+ x = [_fetch(args) for _ in range(nargs)]
+ # If we're reversed, we're pulling in the arguments in reverse,
+ # so we need to turn them around.
+ if spos is not None:
+ x.reverse()
+ rv.append(tuple(x))
+ elif nargs < 0:
+ if spos is not None:
+ raise TypeError("Cannot have two nargs < 0")
+ spos = len(rv)
+ rv.append(None)
+
+ # spos is the position of the wildcard (star). If it's not `None`,
+ # we fill it with the remainder.
+ if spos is not None:
+ rv[spos] = tuple(args)
+ args = []
+ rv[spos + 1 :] = reversed(rv[spos + 1 :])
+
+ return tuple(rv), list(args)
+
+
+def _error_opt_args(nargs, opt):
+ if nargs == 1:
+ raise BadOptionUsage(opt, "{} option requires an argument".format(opt))
+ raise BadOptionUsage(opt, "{} option requires {} arguments".format(opt, nargs))
+
+
+def split_opt(opt):
+ first = opt[:1]
+ if first.isalnum():
+ return "", opt
+ if opt[1:2] == first:
+ return opt[:2], opt[2:]
+ return first, opt[1:]
+
+
+def normalize_opt(opt, ctx):
+ if ctx is None or ctx.token_normalize_func is None:
+ return opt
+ prefix, opt = split_opt(opt)
+ return prefix + ctx.token_normalize_func(opt)
+
+
+def split_arg_string(string):
+ """Given an argument string this attempts to split it into small parts."""
+ rv = []
+ for match in re.finditer(
+ r"('([^'\\]*(?:\\.[^'\\]*)*)'|\"([^\"\\]*(?:\\.[^\"\\]*)*)\"|\S+)\s*",
+ string,
+ re.S,
+ ):
+ arg = match.group().strip()
+ if arg[:1] == arg[-1:] and arg[:1] in "\"'":
+ arg = arg[1:-1].encode("ascii", "backslashreplace").decode("unicode-escape")
+ try:
+ arg = type(string)(arg)
+ except UnicodeError:
+ pass
+ rv.append(arg)
+ return rv
+
+
+class Option(object):
+ def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None):
+ self._short_opts = []
+ self._long_opts = []
+ self.prefixes = set()
+
+ for opt in opts:
+ prefix, value = split_opt(opt)
+ if not prefix:
+ raise ValueError("Invalid start character for option ({})".format(opt))
+ self.prefixes.add(prefix[0])
+ if len(prefix) == 1 and len(value) == 1:
+ self._short_opts.append(opt)
+ else:
+ self._long_opts.append(opt)
+ self.prefixes.add(prefix)
+
+ if action is None:
+ action = "store"
+
+ self.dest = dest
+ self.action = action
+ self.nargs = nargs
+ self.const = const
+ self.obj = obj
+
+ @property
+ def takes_value(self):
+ return self.action in ("store", "append")
+
+ def process(self, value, state):
+ if self.action == "store":
+ state.opts[self.dest] = value
+ elif self.action == "store_const":
+ state.opts[self.dest] = self.const
+ elif self.action == "append":
+ state.opts.setdefault(self.dest, []).append(value)
+ elif self.action == "append_const":
+ state.opts.setdefault(self.dest, []).append(self.const)
+ elif self.action == "count":
+ state.opts[self.dest] = state.opts.get(self.dest, 0) + 1
+ else:
+ raise ValueError("unknown action '{}'".format(self.action))
+ state.order.append(self.obj)
+
+
+class Argument(object):
+ def __init__(self, dest, nargs=1, obj=None):
+ self.dest = dest
+ self.nargs = nargs
+ self.obj = obj
+
+ def process(self, value, state):
+ if self.nargs > 1:
+ holes = sum(1 for x in value if x is None)
+ if holes == len(value):
+ value = None
+ elif holes != 0:
+ raise BadArgumentUsage(
+ "argument {} takes {} values".format(self.dest, self.nargs)
+ )
+ state.opts[self.dest] = value
+ state.order.append(self.obj)
+
+
+class ParsingState(object):
+ def __init__(self, rargs):
+ self.opts = {}
+ self.largs = []
+ self.rargs = rargs
+ self.order = []
+
+
+class OptionParser(object):
+ """The option parser is an internal class that is ultimately used to
+ parse options and arguments. It's modelled after optparse and brings
+ a similar but vastly simplified API. It should generally not be used
+ directly as the high level Click classes wrap it for you.
+
+ It's not nearly as extensible as optparse or argparse as it does not
+ implement features that are implemented on a higher level (such as
+ types or defaults).
+
+ :param ctx: optionally the :class:`~click.Context` where this parser
+ should go with.
+ """
+
+ def __init__(self, ctx=None):
+ #: The :class:`~click.Context` for this parser. This might be
+ #: `None` for some advanced use cases.
+ self.ctx = ctx
+ #: This controls how the parser deals with interspersed arguments.
+ #: If this is set to `False`, the parser will stop on the first
+ #: non-option. Click uses this to implement nested subcommands
+ #: safely.
+ self.allow_interspersed_args = True
+ #: This tells the parser how to deal with unknown options. By
+ #: default it will error out (which is sensible), but there is a
+ #: second mode where it will ignore it and continue processing
+ #: after shifting all the unknown options into the resulting args.
+ self.ignore_unknown_options = False
+ if ctx is not None:
+ self.allow_interspersed_args = ctx.allow_interspersed_args
+ self.ignore_unknown_options = ctx.ignore_unknown_options
+ self._short_opt = {}
+ self._long_opt = {}
+ self._opt_prefixes = {"-", "--"}
+ self._args = []
+
+ def add_option(self, opts, dest, action=None, nargs=1, const=None, obj=None):
+ """Adds a new option named `dest` to the parser. The destination
+ is not inferred (unlike with optparse) and needs to be explicitly
+ provided. Action can be any of ``store``, ``store_const``,
+        ``append``, ``append_const`` or ``count``.
+
+ The `obj` can be used to identify the option in the order list
+ that is returned from the parser.
+ """
+ if obj is None:
+ obj = dest
+ opts = [normalize_opt(opt, self.ctx) for opt in opts]
+ option = Option(opts, dest, action=action, nargs=nargs, const=const, obj=obj)
+ self._opt_prefixes.update(option.prefixes)
+ for opt in option._short_opts:
+ self._short_opt[opt] = option
+ for opt in option._long_opts:
+ self._long_opt[opt] = option
+
+ def add_argument(self, dest, nargs=1, obj=None):
+ """Adds a positional argument named `dest` to the parser.
+
+ The `obj` can be used to identify the option in the order list
+ that is returned from the parser.
+ """
+ if obj is None:
+ obj = dest
+ self._args.append(Argument(dest=dest, nargs=nargs, obj=obj))
+
+ def parse_args(self, args):
+ """Parses positional arguments and returns ``(values, args, order)``
+ for the parsed options and arguments as well as the leftover
+ arguments if there are any. The order is a list of objects as they
+ appear on the command line. If arguments appear multiple times they
+ will be memorized multiple times as well.
+ """
+ state = ParsingState(args)
+ try:
+ self._process_args_for_options(state)
+ self._process_args_for_args(state)
+ except UsageError:
+ if self.ctx is None or not self.ctx.resilient_parsing:
+ raise
+ return state.opts, state.largs, state.order
+
+ def _process_args_for_args(self, state):
+ pargs, args = _unpack_args(
+ state.largs + state.rargs, [x.nargs for x in self._args]
+ )
+
+ for idx, arg in enumerate(self._args):
+ arg.process(pargs[idx], state)
+
+ state.largs = args
+ state.rargs = []
+
+ def _process_args_for_options(self, state):
+ while state.rargs:
+ arg = state.rargs.pop(0)
+ arglen = len(arg)
+ # Double dashes always handled explicitly regardless of what
+ # prefixes are valid.
+ if arg == "--":
+ return
+ elif arg[:1] in self._opt_prefixes and arglen > 1:
+ self._process_opts(arg, state)
+ elif self.allow_interspersed_args:
+ state.largs.append(arg)
+ else:
+ state.rargs.insert(0, arg)
+ return
+
+ # Say this is the original argument list:
+ # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
+ # ^
+ # (we are about to process arg(i)).
+ #
+ # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
+ # [arg0, ..., arg(i-1)] (any options and their arguments will have
+ # been removed from largs).
+ #
+ # The while loop will usually consume 1 or more arguments per pass.
+ # If it consumes 1 (eg. arg is an option that takes no arguments),
+ # then after _process_arg() is done the situation is:
+ #
+ # largs = subset of [arg0, ..., arg(i)]
+ # rargs = [arg(i+1), ..., arg(N-1)]
+ #
+ # If allow_interspersed_args is false, largs will always be
+ # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
+ # not a very interesting subset!
+
+ def _match_long_opt(self, opt, explicit_value, state):
+ if opt not in self._long_opt:
+ possibilities = [word for word in self._long_opt if word.startswith(opt)]
+ raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
+
+ option = self._long_opt[opt]
+ if option.takes_value:
+ # At this point it's safe to modify rargs by injecting the
+ # explicit value, because no exception is raised in this
+ # branch. This means that the inserted value will be fully
+ # consumed.
+ if explicit_value is not None:
+ state.rargs.insert(0, explicit_value)
+
+ nargs = option.nargs
+ if len(state.rargs) < nargs:
+ _error_opt_args(nargs, opt)
+ elif nargs == 1:
+ value = state.rargs.pop(0)
+ else:
+ value = tuple(state.rargs[:nargs])
+ del state.rargs[:nargs]
+
+ elif explicit_value is not None:
+ raise BadOptionUsage(opt, "{} option does not take a value".format(opt))
+
+ else:
+ value = None
+
+ option.process(value, state)
+
+ def _match_short_opt(self, arg, state):
+ stop = False
+ i = 1
+ prefix = arg[0]
+ unknown_options = []
+
+ for ch in arg[1:]:
+ opt = normalize_opt(prefix + ch, self.ctx)
+ option = self._short_opt.get(opt)
+ i += 1
+
+ if not option:
+ if self.ignore_unknown_options:
+ unknown_options.append(ch)
+ continue
+ raise NoSuchOption(opt, ctx=self.ctx)
+ if option.takes_value:
+ # Any characters left in arg? Pretend they're the
+ # next arg, and stop consuming characters of arg.
+ if i < len(arg):
+ state.rargs.insert(0, arg[i:])
+ stop = True
+
+ nargs = option.nargs
+ if len(state.rargs) < nargs:
+ _error_opt_args(nargs, opt)
+ elif nargs == 1:
+ value = state.rargs.pop(0)
+ else:
+ value = tuple(state.rargs[:nargs])
+ del state.rargs[:nargs]
+
+ else:
+ value = None
+
+ option.process(value, state)
+
+ if stop:
+ break
+
+ # If we got any unknown options we re-combinate the string of the
+ # remaining options and re-attach the prefix, then report that
+ # to the state as new larg. This way there is basic combinatorics
+ # that can be achieved while still ignoring unknown arguments.
+ if self.ignore_unknown_options and unknown_options:
+ state.largs.append("{}{}".format(prefix, "".join(unknown_options)))
+
+ def _process_opts(self, arg, state):
+ explicit_value = None
+ # Long option handling happens in two parts. The first part is
+ # supporting explicitly attached values. In any case, we will try
+ # to long match the option first.
+ if "=" in arg:
+ long_opt, explicit_value = arg.split("=", 1)
+ else:
+ long_opt = arg
+ norm_long_opt = normalize_opt(long_opt, self.ctx)
+
+ # At this point we will match the (assumed) long option through
+ # the long option matching code. Note that this allows options
+ # like "-foo" to be matched as long options.
+ try:
+ self._match_long_opt(norm_long_opt, explicit_value, state)
+ except NoSuchOption:
+ # At this point the long option matching failed, and we need
+ # to try with short options. However there is a special rule
+ # which says, that if we have a two character options prefix
+ # (applies to "--foo" for instance), we do not dispatch to the
+ # short option code and will instead raise the no option
+ # error.
+ if arg[:2] not in self._opt_prefixes:
+ return self._match_short_opt(arg, state)
+ if not self.ignore_unknown_options:
+ raise
+ state.largs.append(arg)
diff --git a/matteo_env/Lib/site-packages/click/termui.py b/matteo_env/Lib/site-packages/click/termui.py
new file mode 100644
index 0000000..02ef9e9
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/termui.py
@@ -0,0 +1,681 @@
+import inspect
+import io
+import itertools
+import os
+import struct
+import sys
+
+from ._compat import DEFAULT_COLUMNS
+from ._compat import get_winterm_size
+from ._compat import isatty
+from ._compat import raw_input
+from ._compat import string_types
+from ._compat import strip_ansi
+from ._compat import text_type
+from ._compat import WIN
+from .exceptions import Abort
+from .exceptions import UsageError
+from .globals import resolve_color_default
+from .types import Choice
+from .types import convert_type
+from .types import Path
+from .utils import echo
+from .utils import LazyFile
+
+# The prompt functions to use. The doc tools currently override these
+# functions to customize how they work.
+visible_prompt_func = raw_input
+
+_ansi_colors = {
+ "black": 30,
+ "red": 31,
+ "green": 32,
+ "yellow": 33,
+ "blue": 34,
+ "magenta": 35,
+ "cyan": 36,
+ "white": 37,
+ "reset": 39,
+ "bright_black": 90,
+ "bright_red": 91,
+ "bright_green": 92,
+ "bright_yellow": 93,
+ "bright_blue": 94,
+ "bright_magenta": 95,
+ "bright_cyan": 96,
+ "bright_white": 97,
+}
+_ansi_reset_all = "\033[0m"
+
+
+def hidden_prompt_func(prompt):
+ import getpass
+
+ return getpass.getpass(prompt)
+
+
+def _build_prompt(
+ text, suffix, show_default=False, default=None, show_choices=True, type=None
+):
+ prompt = text
+ if type is not None and show_choices and isinstance(type, Choice):
+ prompt += " ({})".format(", ".join(map(str, type.choices)))
+ if default is not None and show_default:
+ prompt = "{} [{}]".format(prompt, _format_default(default))
+ return prompt + suffix
+
+
+def _format_default(default):
+ if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"):
+ return default.name
+
+ return default
+
+
+def prompt(
+ text,
+ default=None,
+ hide_input=False,
+ confirmation_prompt=False,
+ type=None,
+ value_proc=None,
+ prompt_suffix=": ",
+ show_default=True,
+ err=False,
+ show_choices=True,
+):
+ """Prompts a user for input. This is a convenience function that can
+ be used to prompt a user for input later.
+
+    If the user aborts the input by sending an interrupt signal, this
+ function will catch it and raise a :exc:`Abort` exception.
+
+ .. versionadded:: 7.0
+ Added the show_choices parameter.
+
+ .. versionadded:: 6.0
+ Added unicode support for cmd.exe on Windows.
+
+ .. versionadded:: 4.0
+ Added the `err` parameter.
+
+ :param text: the text to show for the prompt.
+ :param default: the default value to use if no input happens. If this
+ is not given it will prompt until it's aborted.
+ :param hide_input: if this is set to true then the input value will
+ be hidden.
+ :param confirmation_prompt: asks for confirmation for the value.
+ :param type: the type to use to check the value against.
+ :param value_proc: if this parameter is provided it's a function that
+ is invoked instead of the type conversion to
+ convert a value.
+ :param prompt_suffix: a suffix that should be added to the prompt.
+ :param show_default: shows or hides the default value in the prompt.
+ :param err: if set to true the file defaults to ``stderr`` instead of
+ ``stdout``, the same as with echo.
+ :param show_choices: Show or hide choices if the passed type is a Choice.
+ For example if type is a Choice of either day or week,
+ show_choices is true and text is "Group by" then the
+ prompt will be "Group by (day, week): ".
+ """
+ result = None
+
+ def prompt_func(text):
+ f = hidden_prompt_func if hide_input else visible_prompt_func
+ try:
+ # Write the prompt separately so that we get nice
+ # coloring through colorama on Windows
+ echo(text, nl=False, err=err)
+ return f("")
+ except (KeyboardInterrupt, EOFError):
+ # getpass doesn't print a newline if the user aborts input with ^C.
+ # Allegedly this behavior is inherited from getpass(3).
+ # A doc bug has been filed at https://bugs.python.org/issue24711
+ if hide_input:
+ echo(None, err=err)
+ raise Abort()
+
+ if value_proc is None:
+ value_proc = convert_type(type, default)
+
+ prompt = _build_prompt(
+ text, prompt_suffix, show_default, default, show_choices, type
+ )
+
+ while 1:
+ while 1:
+ value = prompt_func(prompt)
+ if value:
+ break
+ elif default is not None:
+ if isinstance(value_proc, Path):
+ # validate Path default value(exists, dir_okay etc.)
+ value = default
+ break
+ return default
+ try:
+ result = value_proc(value)
+ except UsageError as e:
+ echo("Error: {}".format(e.message), err=err) # noqa: B306
+ continue
+ if not confirmation_prompt:
+ return result
+ while 1:
+ value2 = prompt_func("Repeat for confirmation: ")
+ if value2:
+ break
+ if value == value2:
+ return result
+ echo("Error: the two entered values do not match", err=err)
+
+
+def confirm(
+ text, default=False, abort=False, prompt_suffix=": ", show_default=True, err=False
+):
+ """Prompts for confirmation (yes/no question).
+
+    If the user aborts the input by sending an interrupt signal this
+ function will catch it and raise a :exc:`Abort` exception.
+
+ .. versionadded:: 4.0
+ Added the `err` parameter.
+
+ :param text: the question to ask.
+ :param default: the default for the prompt.
+ :param abort: if this is set to `True` a negative answer aborts the
+ exception by raising :exc:`Abort`.
+ :param prompt_suffix: a suffix that should be added to the prompt.
+ :param show_default: shows or hides the default value in the prompt.
+ :param err: if set to true the file defaults to ``stderr`` instead of
+ ``stdout``, the same as with echo.
+ """
+ prompt = _build_prompt(
+ text, prompt_suffix, show_default, "Y/n" if default else "y/N"
+ )
+ while 1:
+ try:
+ # Write the prompt separately so that we get nice
+ # coloring through colorama on Windows
+ echo(prompt, nl=False, err=err)
+ value = visible_prompt_func("").lower().strip()
+ except (KeyboardInterrupt, EOFError):
+ raise Abort()
+ if value in ("y", "yes"):
+ rv = True
+ elif value in ("n", "no"):
+ rv = False
+ elif value == "":
+ rv = default
+ else:
+ echo("Error: invalid input", err=err)
+ continue
+ break
+ if abort and not rv:
+ raise Abort()
+ return rv
+
+
+def get_terminal_size():
+ """Returns the current size of the terminal as tuple in the form
+ ``(width, height)`` in columns and rows.
+ """
+ # If shutil has get_terminal_size() (Python 3.3 and later) use that
+ if sys.version_info >= (3, 3):
+ import shutil
+
+ shutil_get_terminal_size = getattr(shutil, "get_terminal_size", None)
+ if shutil_get_terminal_size:
+ sz = shutil_get_terminal_size()
+ return sz.columns, sz.lines
+
+ # We provide a sensible default for get_winterm_size() when being invoked
+ # inside a subprocess. Without this, it would not provide a useful input.
+ if get_winterm_size is not None:
+ size = get_winterm_size()
+ if size == (0, 0):
+ return (79, 24)
+ else:
+ return size
+
+ def ioctl_gwinsz(fd):
+ try:
+ import fcntl
+ import termios
+
+ cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
+ except Exception:
+ return
+ return cr
+
+ cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
+ if not cr:
+ try:
+ fd = os.open(os.ctermid(), os.O_RDONLY)
+ try:
+ cr = ioctl_gwinsz(fd)
+ finally:
+ os.close(fd)
+ except Exception:
+ pass
+ if not cr or not cr[0] or not cr[1]:
+ cr = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", DEFAULT_COLUMNS))
+ return int(cr[1]), int(cr[0])
+
+
+def echo_via_pager(text_or_generator, color=None):
+ """This function takes a text and shows it via an environment specific
+ pager on stdout.
+
+ .. versionchanged:: 3.0
+ Added the `color` flag.
+
+ :param text_or_generator: the text to page, or alternatively, a
+ generator emitting the text to page.
+ :param color: controls if the pager supports ANSI colors or not. The
+ default is autodetection.
+ """
+ color = resolve_color_default(color)
+
+ if inspect.isgeneratorfunction(text_or_generator):
+ i = text_or_generator()
+ elif isinstance(text_or_generator, string_types):
+ i = [text_or_generator]
+ else:
+ i = iter(text_or_generator)
+
+ # convert every element of i to a text type if necessary
+ text_generator = (el if isinstance(el, string_types) else text_type(el) for el in i)
+
+ from ._termui_impl import pager
+
+ return pager(itertools.chain(text_generator, "\n"), color)
+
+
+def progressbar(
+ iterable=None,
+ length=None,
+ label=None,
+ show_eta=True,
+ show_percent=None,
+ show_pos=False,
+ item_show_func=None,
+ fill_char="#",
+ empty_char="-",
+ bar_template="%(label)s [%(bar)s] %(info)s",
+ info_sep=" ",
+ width=36,
+ file=None,
+ color=None,
+):
+ """This function creates an iterable context manager that can be used
+ to iterate over something while showing a progress bar. It will
+ either iterate over the `iterable` or `length` items (that are counted
+ up). While iteration happens, this function will print a rendered
+ progress bar to the given `file` (defaults to stdout) and will attempt
+ to calculate remaining time and more. By default, this progress bar
+ will not be rendered if the file is not a terminal.
+
+ The context manager creates the progress bar. When the context
+ manager is entered the progress bar is already created. With every
+ iteration over the progress bar, the iterable passed to the bar is
+ advanced and the bar is updated. When the context manager exits,
+ a newline is printed and the progress bar is finalized on screen.
+
+ Note: The progress bar is currently designed for use cases where the
+ total progress can be expected to take at least several seconds.
+ Because of this, the ProgressBar class object won't display
+ progress that is considered too fast, and progress where the time
+ between steps is less than a second.
+
+ No printing must happen or the progress bar will be unintentionally
+ destroyed.
+
+ Example usage::
+
+ with progressbar(items) as bar:
+ for item in bar:
+ do_something_with(item)
+
+ Alternatively, if no iterable is specified, one can manually update the
+ progress bar through the `update()` method instead of directly
+ iterating over the progress bar. The update method accepts the number
+ of steps to increment the bar with::
+
+ with progressbar(length=chunks.total_bytes) as bar:
+ for chunk in chunks:
+ process_chunk(chunk)
+ bar.update(chunks.bytes)
+
+ .. versionadded:: 2.0
+
+ .. versionadded:: 4.0
+ Added the `color` parameter. Added a `update` method to the
+ progressbar object.
+
+ :param iterable: an iterable to iterate over. If not provided the length
+ is required.
+ :param length: the number of items to iterate over. By default the
+ progressbar will attempt to ask the iterator about its
+ length, which might or might not work. If an iterable is
+ also provided this parameter can be used to override the
+ length. If an iterable is not provided the progress bar
+ will iterate over a range of that length.
+ :param label: the label to show next to the progress bar.
+ :param show_eta: enables or disables the estimated time display. This is
+ automatically disabled if the length cannot be
+ determined.
+ :param show_percent: enables or disables the percentage display. The
+ default is `True` if the iterable has a length or
+ `False` if not.
+ :param show_pos: enables or disables the absolute position display. The
+ default is `False`.
+ :param item_show_func: a function called with the current item which
+ can return a string to show the current item
+ next to the progress bar. Note that the current
+ item can be `None`!
+ :param fill_char: the character to use to show the filled part of the
+ progress bar.
+ :param empty_char: the character to use to show the non-filled part of
+ the progress bar.
+ :param bar_template: the format string to use as template for the bar.
+ The parameters in it are ``label`` for the label,
+ ``bar`` for the progress bar and ``info`` for the
+ info section.
+ :param info_sep: the separator between multiple info items (eta etc.)
+ :param width: the width of the progress bar in characters, 0 means full
+ terminal width
+ :param file: the file to write to. If this is not a terminal then
+ only the label is printed.
+ :param color: controls if the terminal supports ANSI colors or not. The
+ default is autodetection. This is only needed if ANSI
+ codes are included anywhere in the progress bar output
+ which is not the case by default.
+ """
+ from ._termui_impl import ProgressBar
+
+ color = resolve_color_default(color)
+ return ProgressBar(
+ iterable=iterable,
+ length=length,
+ show_eta=show_eta,
+ show_percent=show_percent,
+ show_pos=show_pos,
+ item_show_func=item_show_func,
+ fill_char=fill_char,
+ empty_char=empty_char,
+ bar_template=bar_template,
+ info_sep=info_sep,
+ file=file,
+ label=label,
+ width=width,
+ color=color,
+ )
+
+
+def clear():
+ """Clears the terminal screen. This will have the effect of clearing
+ the whole visible space of the terminal and moving the cursor to the
+ top left. This does not do anything if not connected to a terminal.
+
+ .. versionadded:: 2.0
+ """
+ if not isatty(sys.stdout):
+ return
+ # If we're on Windows and we don't have colorama available, then we
+ # clear the screen by shelling out. Otherwise we can use an escape
+ # sequence.
+ if WIN:
+ os.system("cls")
+ else:
+ sys.stdout.write("\033[2J\033[1;1H")
+
+
+def style(
+ text,
+ fg=None,
+ bg=None,
+ bold=None,
+ dim=None,
+ underline=None,
+ blink=None,
+ reverse=None,
+ reset=True,
+):
+ """Styles a text with ANSI styles and returns the new string. By
+ default the styling is self contained which means that at the end
+ of the string a reset code is issued. This can be prevented by
+ passing ``reset=False``.
+
+ Examples::
+
+ click.echo(click.style('Hello World!', fg='green'))
+ click.echo(click.style('ATTENTION!', blink=True))
+ click.echo(click.style('Some things', reverse=True, fg='cyan'))
+
+ Supported color names:
+
+ * ``black`` (might be a gray)
+ * ``red``
+ * ``green``
+ * ``yellow`` (might be an orange)
+ * ``blue``
+ * ``magenta``
+ * ``cyan``
+ * ``white`` (might be light gray)
+ * ``bright_black``
+ * ``bright_red``
+ * ``bright_green``
+ * ``bright_yellow``
+ * ``bright_blue``
+ * ``bright_magenta``
+ * ``bright_cyan``
+ * ``bright_white``
+ * ``reset`` (reset the color code only)
+
+ .. versionadded:: 2.0
+
+ .. versionadded:: 7.0
+ Added support for bright colors.
+
+ :param text: the string to style with ansi codes.
+ :param fg: if provided this will become the foreground color.
+ :param bg: if provided this will become the background color.
+ :param bold: if provided this will enable or disable bold mode.
+ :param dim: if provided this will enable or disable dim mode. This is
+ badly supported.
+ :param underline: if provided this will enable or disable underline.
+ :param blink: if provided this will enable or disable blinking.
+ :param reverse: if provided this will enable or disable inverse
+ rendering (foreground becomes background and the
+ other way round).
+ :param reset: by default a reset-all code is added at the end of the
+ string which means that styles do not carry over. This
+ can be disabled to compose styles.
+ """
+ bits = []
+ if fg:
+ try:
+ bits.append("\033[{}m".format(_ansi_colors[fg]))
+ except KeyError:
+ raise TypeError("Unknown color '{}'".format(fg))
+ if bg:
+ try:
+ bits.append("\033[{}m".format(_ansi_colors[bg] + 10))
+ except KeyError:
+ raise TypeError("Unknown color '{}'".format(bg))
+ if bold is not None:
+ bits.append("\033[{}m".format(1 if bold else 22))
+ if dim is not None:
+ bits.append("\033[{}m".format(2 if dim else 22))
+ if underline is not None:
+ bits.append("\033[{}m".format(4 if underline else 24))
+ if blink is not None:
+ bits.append("\033[{}m".format(5 if blink else 25))
+ if reverse is not None:
+ bits.append("\033[{}m".format(7 if reverse else 27))
+ bits.append(text)
+ if reset:
+ bits.append(_ansi_reset_all)
+ return "".join(bits)
+
+
+def unstyle(text):
+ """Removes ANSI styling information from a string. Usually it's not
+ necessary to use this function as Click's echo function will
+ automatically remove styling if necessary.
+
+ .. versionadded:: 2.0
+
+ :param text: the text to remove style information from.
+ """
+ return strip_ansi(text)
+
+
+def secho(message=None, file=None, nl=True, err=False, color=None, **styles):
+ """This function combines :func:`echo` and :func:`style` into one
+ call. As such the following two calls are the same::
+
+ click.secho('Hello World!', fg='green')
+ click.echo(click.style('Hello World!', fg='green'))
+
+ All keyword arguments are forwarded to the underlying functions
+ depending on which one they go with.
+
+ .. versionadded:: 2.0
+ """
+ if message is not None:
+ message = style(message, **styles)
+ return echo(message, file=file, nl=nl, err=err, color=color)
+
+
+def edit(
+ text=None, editor=None, env=None, require_save=True, extension=".txt", filename=None
+):
+ r"""Edits the given text in the defined editor. If an editor is given
+ (should be the full path to the executable but the regular operating
+ system search path is used for finding the executable) it overrides
+ the detected editor. Optionally, some environment variables can be
+ used. If the editor is closed without changes, `None` is returned. In
+ case a file is edited directly the return value is always `None` and
+ `require_save` and `extension` are ignored.
+
+ If the editor cannot be opened a :exc:`UsageError` is raised.
+
+ Note for Windows: to simplify cross-platform usage, the newlines are
+ automatically converted from POSIX to Windows and vice versa. As such,
+ the message here will have ``\n`` as newline markers.
+
+ :param text: the text to edit.
+ :param editor: optionally the editor to use. Defaults to automatic
+ detection.
+ :param env: environment variables to forward to the editor.
+ :param require_save: if this is true, then not saving in the editor
+ will make the return value become `None`.
+ :param extension: the extension to tell the editor about. This defaults
+ to `.txt` but changing this might change syntax
+ highlighting.
+ :param filename: if provided it will edit this file instead of the
+ provided text contents. It will not use a temporary
+ file as an indirection in that case.
+ """
+ from ._termui_impl import Editor
+
+ editor = Editor(
+ editor=editor, env=env, require_save=require_save, extension=extension
+ )
+ if filename is None:
+ return editor.edit(text)
+ editor.edit_file(filename)
+
+
+def launch(url, wait=False, locate=False):
+ """This function launches the given URL (or filename) in the default
+ viewer application for this file type. If this is an executable, it
+ might launch the executable in a new session. The return value is
+ the exit code of the launched application. Usually, ``0`` indicates
+ success.
+
+ Examples::
+
+ click.launch('https://click.palletsprojects.com/')
+ click.launch('/my/downloaded/file', locate=True)
+
+ .. versionadded:: 2.0
+
+ :param url: URL or filename of the thing to launch.
+ :param wait: waits for the program to stop.
+ :param locate: if this is set to `True` then instead of launching the
+ application associated with the URL it will attempt to
+ launch a file manager with the file located. This
+ might have weird effects if the URL does not point to
+ the filesystem.
+ """
+ from ._termui_impl import open_url
+
+ return open_url(url, wait=wait, locate=locate)
+
+
+# If this is provided, getchar() calls into this instead. This is used
+# for unittesting purposes.
+_getchar = None
+
+
+def getchar(echo=False):
+ """Fetches a single character from the terminal and returns it. This
+ will always return a unicode character and under certain rare
+ circumstances this might return more than one character. The
+ situations which more than one character is returned is when for
+ whatever reason multiple characters end up in the terminal buffer or
+ standard input was not actually a terminal.
+
+ Note that this will always read from the terminal, even if something
+ is piped into the standard input.
+
+ Note for Windows: in rare cases when typing non-ASCII characters, this
+ function might wait for a second character and then return both at once.
+ This is because certain Unicode characters look like special-key markers.
+
+ .. versionadded:: 2.0
+
+ :param echo: if set to `True`, the character read will also show up on
+ the terminal. The default is to not show it.
+ """
+ f = _getchar
+ if f is None:
+ from ._termui_impl import getchar as f
+ return f(echo)
+
+
+def raw_terminal():
+ from ._termui_impl import raw_terminal as f
+
+ return f()
+
+
+def pause(info="Press any key to continue ...", err=False):
+ """This command stops execution and waits for the user to press any
+ key to continue. This is similar to the Windows batch "pause"
+ command. If the program is not run through a terminal, this command
+ will instead do nothing.
+
+ .. versionadded:: 2.0
+
+ .. versionadded:: 4.0
+ Added the `err` parameter.
+
+ :param info: the info string to print before pausing.
+    :param err: if set to true the message goes to ``stderr`` instead of
+ ``stdout``, the same as with echo.
+ """
+ if not isatty(sys.stdin) or not isatty(sys.stdout):
+ return
+ try:
+ if info:
+ echo(info, nl=False, err=err)
+ try:
+ getchar()
+ except (KeyboardInterrupt, EOFError):
+ pass
+ finally:
+ if info:
+ echo(err=err)
diff --git a/matteo_env/Lib/site-packages/click/testing.py b/matteo_env/Lib/site-packages/click/testing.py
new file mode 100644
index 0000000..a3dba3b
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/testing.py
@@ -0,0 +1,382 @@
+import contextlib
+import os
+import shlex
+import shutil
+import sys
+import tempfile
+
+from . import formatting
+from . import termui
+from . import utils
+from ._compat import iteritems
+from ._compat import PY2
+from ._compat import string_types
+
+
+if PY2:
+ from cStringIO import StringIO
+else:
+ import io
+ from ._compat import _find_binary_reader
+
+
+class EchoingStdin(object):
+ def __init__(self, input, output):
+ self._input = input
+ self._output = output
+
+ def __getattr__(self, x):
+ return getattr(self._input, x)
+
+ def _echo(self, rv):
+ self._output.write(rv)
+ return rv
+
+ def read(self, n=-1):
+ return self._echo(self._input.read(n))
+
+ def readline(self, n=-1):
+ return self._echo(self._input.readline(n))
+
+ def readlines(self):
+ return [self._echo(x) for x in self._input.readlines()]
+
+ def __iter__(self):
+ return iter(self._echo(x) for x in self._input)
+
+ def __repr__(self):
+ return repr(self._input)
+
+
+def make_input_stream(input, charset):
+ # Is already an input stream.
+ if hasattr(input, "read"):
+ if PY2:
+ return input
+ rv = _find_binary_reader(input)
+ if rv is not None:
+ return rv
+ raise TypeError("Could not find binary reader for input stream.")
+
+ if input is None:
+ input = b""
+ elif not isinstance(input, bytes):
+ input = input.encode(charset)
+ if PY2:
+ return StringIO(input)
+ return io.BytesIO(input)
+
+
+class Result(object):
+ """Holds the captured result of an invoked CLI script."""
+
+ def __init__(
+ self, runner, stdout_bytes, stderr_bytes, exit_code, exception, exc_info=None
+ ):
+ #: The runner that created the result
+ self.runner = runner
+ #: The standard output as bytes.
+ self.stdout_bytes = stdout_bytes
+ #: The standard error as bytes, or None if not available
+ self.stderr_bytes = stderr_bytes
+ #: The exit code as integer.
+ self.exit_code = exit_code
+ #: The exception that happened if one did.
+ self.exception = exception
+ #: The traceback
+ self.exc_info = exc_info
+
+ @property
+ def output(self):
+ """The (standard) output as unicode string."""
+ return self.stdout
+
+ @property
+ def stdout(self):
+ """The standard output as unicode string."""
+ return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
+ "\r\n", "\n"
+ )
+
+ @property
+ def stderr(self):
+ """The standard error as unicode string."""
+ if self.stderr_bytes is None:
+ raise ValueError("stderr not separately captured")
+ return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
+ "\r\n", "\n"
+ )
+
+ def __repr__(self):
+ return "<{} {}>".format(
+ type(self).__name__, repr(self.exception) if self.exception else "okay"
+ )
+
+
+class CliRunner(object):
+ """The CLI runner provides functionality to invoke a Click command line
+    script for unittesting purposes in an isolated environment.  This only
+ works in single-threaded systems without any concurrency as it changes the
+ global interpreter state.
+
+ :param charset: the character set for the input and output data. This is
+ UTF-8 by default and should not be changed currently as
+ the reporting to Click only works in Python 2 properly.
+ :param env: a dictionary with environment variables for overriding.
+ :param echo_stdin: if this is set to `True`, then reading from stdin writes
+ to stdout. This is useful for showing examples in
+ some circumstances. Note that regular prompts
+ will automatically echo the input.
+ :param mix_stderr: if this is set to `False`, then stdout and stderr are
+ preserved as independent streams. This is useful for
+ Unix-philosophy apps that have predictable stdout and
+ noisy stderr, such that each may be measured
+ independently
+ """
+
+ def __init__(self, charset=None, env=None, echo_stdin=False, mix_stderr=True):
+ if charset is None:
+ charset = "utf-8"
+ self.charset = charset
+ self.env = env or {}
+ self.echo_stdin = echo_stdin
+ self.mix_stderr = mix_stderr
+
+ def get_default_prog_name(self, cli):
+ """Given a command object it will return the default program name
+ for it. The default is the `name` attribute or ``"root"`` if not
+ set.
+ """
+ return cli.name or "root"
+
+ def make_env(self, overrides=None):
+ """Returns the environment overrides for invoking a script."""
+ rv = dict(self.env)
+ if overrides:
+ rv.update(overrides)
+ return rv
+
+ @contextlib.contextmanager
+ def isolation(self, input=None, env=None, color=False):
+ """A context manager that sets up the isolation for invoking of a
+ command line tool. This sets up stdin with the given input data
+ and `os.environ` with the overrides from the given dictionary.
+ This also rebinds some internals in Click to be mocked (like the
+ prompt functionality).
+
+ This is automatically done in the :meth:`invoke` method.
+
+ .. versionadded:: 4.0
+ The ``color`` parameter was added.
+
+ :param input: the input stream to put into sys.stdin.
+ :param env: the environment overrides as dictionary.
+ :param color: whether the output should contain color codes. The
+ application can still override this explicitly.
+ """
+ input = make_input_stream(input, self.charset)
+
+ old_stdin = sys.stdin
+ old_stdout = sys.stdout
+ old_stderr = sys.stderr
+ old_forced_width = formatting.FORCED_WIDTH
+ formatting.FORCED_WIDTH = 80
+
+ env = self.make_env(env)
+
+ if PY2:
+ bytes_output = StringIO()
+ if self.echo_stdin:
+ input = EchoingStdin(input, bytes_output)
+ sys.stdout = bytes_output
+ if not self.mix_stderr:
+ bytes_error = StringIO()
+ sys.stderr = bytes_error
+ else:
+ bytes_output = io.BytesIO()
+ if self.echo_stdin:
+ input = EchoingStdin(input, bytes_output)
+ input = io.TextIOWrapper(input, encoding=self.charset)
+ sys.stdout = io.TextIOWrapper(bytes_output, encoding=self.charset)
+ if not self.mix_stderr:
+ bytes_error = io.BytesIO()
+ sys.stderr = io.TextIOWrapper(bytes_error, encoding=self.charset)
+
+ if self.mix_stderr:
+ sys.stderr = sys.stdout
+
+ sys.stdin = input
+
+ def visible_input(prompt=None):
+ sys.stdout.write(prompt or "")
+ val = input.readline().rstrip("\r\n")
+ sys.stdout.write("{}\n".format(val))
+ sys.stdout.flush()
+ return val
+
+ def hidden_input(prompt=None):
+ sys.stdout.write("{}\n".format(prompt or ""))
+ sys.stdout.flush()
+ return input.readline().rstrip("\r\n")
+
+ def _getchar(echo):
+ char = sys.stdin.read(1)
+ if echo:
+ sys.stdout.write(char)
+ sys.stdout.flush()
+ return char
+
+ default_color = color
+
+ def should_strip_ansi(stream=None, color=None):
+ if color is None:
+ return not default_color
+ return not color
+
+ old_visible_prompt_func = termui.visible_prompt_func
+ old_hidden_prompt_func = termui.hidden_prompt_func
+ old__getchar_func = termui._getchar
+ old_should_strip_ansi = utils.should_strip_ansi
+ termui.visible_prompt_func = visible_input
+ termui.hidden_prompt_func = hidden_input
+ termui._getchar = _getchar
+ utils.should_strip_ansi = should_strip_ansi
+
+ old_env = {}
+ try:
+ for key, value in iteritems(env):
+ old_env[key] = os.environ.get(key)
+ if value is None:
+ try:
+ del os.environ[key]
+ except Exception:
+ pass
+ else:
+ os.environ[key] = value
+ yield (bytes_output, not self.mix_stderr and bytes_error)
+ finally:
+ for key, value in iteritems(old_env):
+ if value is None:
+ try:
+ del os.environ[key]
+ except Exception:
+ pass
+ else:
+ os.environ[key] = value
+ sys.stdout = old_stdout
+ sys.stderr = old_stderr
+ sys.stdin = old_stdin
+ termui.visible_prompt_func = old_visible_prompt_func
+ termui.hidden_prompt_func = old_hidden_prompt_func
+ termui._getchar = old__getchar_func
+ utils.should_strip_ansi = old_should_strip_ansi
+ formatting.FORCED_WIDTH = old_forced_width
+
+ def invoke(
+ self,
+ cli,
+ args=None,
+ input=None,
+ env=None,
+ catch_exceptions=True,
+ color=False,
+ **extra
+ ):
+ """Invokes a command in an isolated environment. The arguments are
+ forwarded directly to the command line script, the `extra` keyword
+ arguments are passed to the :meth:`~clickpkg.Command.main` function of
+ the command.
+
+ This returns a :class:`Result` object.
+
+ .. versionadded:: 3.0
+ The ``catch_exceptions`` parameter was added.
+
+ .. versionchanged:: 3.0
+ The result object now has an `exc_info` attribute with the
+ traceback if available.
+
+ .. versionadded:: 4.0
+ The ``color`` parameter was added.
+
+ :param cli: the command to invoke
+ :param args: the arguments to invoke. It may be given as an iterable
+ or a string. When given as string it will be interpreted
+ as a Unix shell command. More details at
+ :func:`shlex.split`.
+ :param input: the input data for `sys.stdin`.
+ :param env: the environment overrides.
+ :param catch_exceptions: Whether to catch any other exceptions than
+ ``SystemExit``.
+ :param extra: the keyword arguments to pass to :meth:`main`.
+ :param color: whether the output should contain color codes. The
+ application can still override this explicitly.
+ """
+ exc_info = None
+ with self.isolation(input=input, env=env, color=color) as outstreams:
+ exception = None
+ exit_code = 0
+
+ if isinstance(args, string_types):
+ args = shlex.split(args)
+
+ try:
+ prog_name = extra.pop("prog_name")
+ except KeyError:
+ prog_name = self.get_default_prog_name(cli)
+
+ try:
+ cli.main(args=args or (), prog_name=prog_name, **extra)
+ except SystemExit as e:
+ exc_info = sys.exc_info()
+ exit_code = e.code
+ if exit_code is None:
+ exit_code = 0
+
+ if exit_code != 0:
+ exception = e
+
+ if not isinstance(exit_code, int):
+ sys.stdout.write(str(exit_code))
+ sys.stdout.write("\n")
+ exit_code = 1
+
+ except Exception as e:
+ if not catch_exceptions:
+ raise
+ exception = e
+ exit_code = 1
+ exc_info = sys.exc_info()
+ finally:
+ sys.stdout.flush()
+ stdout = outstreams[0].getvalue()
+ if self.mix_stderr:
+ stderr = None
+ else:
+ stderr = outstreams[1].getvalue()
+
+ return Result(
+ runner=self,
+ stdout_bytes=stdout,
+ stderr_bytes=stderr,
+ exit_code=exit_code,
+ exception=exception,
+ exc_info=exc_info,
+ )
+
+ @contextlib.contextmanager
+ def isolated_filesystem(self):
+ """A context manager that creates a temporary folder and changes
+ the current working directory to it for isolated filesystem tests.
+ """
+ cwd = os.getcwd()
+ t = tempfile.mkdtemp()
+ os.chdir(t)
+ try:
+ yield t
+ finally:
+ os.chdir(cwd)
+ try:
+ shutil.rmtree(t)
+ except (OSError, IOError): # noqa: B014
+ pass
diff --git a/matteo_env/Lib/site-packages/click/types.py b/matteo_env/Lib/site-packages/click/types.py
new file mode 100644
index 0000000..505c39f
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/types.py
@@ -0,0 +1,762 @@
+import os
+import stat
+from datetime import datetime
+
+from ._compat import _get_argv_encoding
+from ._compat import filename_to_ui
+from ._compat import get_filesystem_encoding
+from ._compat import get_streerror
+from ._compat import open_stream
+from ._compat import PY2
+from ._compat import text_type
+from .exceptions import BadParameter
+from .utils import LazyFile
+from .utils import safecall
+
+
+class ParamType(object):
+ """Helper for converting values through types. The following is
+ necessary for a valid type:
+
+ * it needs a name
+ * it needs to pass through None unchanged
+ * it needs to convert from a string
+ * it needs to convert its result type through unchanged
+ (eg: needs to be idempotent)
+ * it needs to be able to deal with param and context being `None`.
+ This can be the case when the object is used with prompt
+ inputs.
+ """
+
+ is_composite = False
+
+ #: the descriptive name of this type
+ name = None
+
+ #: if a list of this type is expected and the value is pulled from a
+ #: string environment variable, this is what splits it up. `None`
+ #: means any whitespace. For all parameters the general rule is that
+ #: whitespace splits them up. The exception are paths and files which
+ #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on
+ #: Windows).
+ envvar_list_splitter = None
+
+ def __call__(self, value, param=None, ctx=None):
+ if value is not None:
+ return self.convert(value, param, ctx)
+
+ def get_metavar(self, param):
+ """Returns the metavar default for this param if it provides one."""
+
+ def get_missing_message(self, param):
+ """Optionally might return extra information about a missing
+ parameter.
+
+ .. versionadded:: 2.0
+ """
+
+ def convert(self, value, param, ctx):
+ """Converts the value. This is not invoked for values that are
+ `None` (the missing value).
+ """
+ return value
+
+ def split_envvar_value(self, rv):
+ """Given a value from an environment variable this splits it up
+ into small chunks depending on the defined envvar list splitter.
+
+ If the splitter is set to `None`, which means that whitespace splits,
+ then leading and trailing whitespace is ignored. Otherwise, leading
+ and trailing splitters usually lead to empty items being included.
+ """
+ return (rv or "").split(self.envvar_list_splitter)
+
+ def fail(self, message, param=None, ctx=None):
+ """Helper method to fail with an invalid value message."""
+ raise BadParameter(message, ctx=ctx, param=param)
+
+
+class CompositeParamType(ParamType):
+ is_composite = True
+
+ @property
+ def arity(self):
+ raise NotImplementedError()
+
+
+class FuncParamType(ParamType):
+ def __init__(self, func):
+ self.name = func.__name__
+ self.func = func
+
+ def convert(self, value, param, ctx):
+ try:
+ return self.func(value)
+ except ValueError:
+ try:
+ value = text_type(value)
+ except UnicodeError:
+ value = str(value).decode("utf-8", "replace")
+ self.fail(value, param, ctx)
+
+
+class UnprocessedParamType(ParamType):
+ name = "text"
+
+ def convert(self, value, param, ctx):
+ return value
+
+ def __repr__(self):
+ return "UNPROCESSED"
+
+
+class StringParamType(ParamType):
+ name = "text"
+
+ def convert(self, value, param, ctx):
+ if isinstance(value, bytes):
+ enc = _get_argv_encoding()
+ try:
+ value = value.decode(enc)
+ except UnicodeError:
+ fs_enc = get_filesystem_encoding()
+ if fs_enc != enc:
+ try:
+ value = value.decode(fs_enc)
+ except UnicodeError:
+ value = value.decode("utf-8", "replace")
+ else:
+ value = value.decode("utf-8", "replace")
+ return value
+ return value
+
+ def __repr__(self):
+ return "STRING"
+
+
+class Choice(ParamType):
+ """The choice type allows a value to be checked against a fixed set
+ of supported values. All of these values have to be strings.
+
+ You should only pass a list or tuple of choices. Other iterables
+ (like generators) may lead to surprising results.
+
+ The resulting value will always be one of the originally passed choices
+ regardless of ``case_sensitive`` or any ``ctx.token_normalize_func``
+ being specified.
+
+ See :ref:`choice-opts` for an example.
+
+ :param case_sensitive: Set to false to make choices case
+ insensitive. Defaults to true.
+ """
+
+ name = "choice"
+
+ def __init__(self, choices, case_sensitive=True):
+ self.choices = choices
+ self.case_sensitive = case_sensitive
+
+ def get_metavar(self, param):
+ return "[{}]".format("|".join(self.choices))
+
+ def get_missing_message(self, param):
+ return "Choose from:\n\t{}.".format(",\n\t".join(self.choices))
+
+ def convert(self, value, param, ctx):
+ # Match through normalization and case sensitivity
+ # first do token_normalize_func, then lowercase
+ # preserve original `value` to produce an accurate message in
+ # `self.fail`
+ normed_value = value
+ normed_choices = {choice: choice for choice in self.choices}
+
+ if ctx is not None and ctx.token_normalize_func is not None:
+ normed_value = ctx.token_normalize_func(value)
+ normed_choices = {
+ ctx.token_normalize_func(normed_choice): original
+ for normed_choice, original in normed_choices.items()
+ }
+
+ if not self.case_sensitive:
+ if PY2:
+ lower = str.lower
+ else:
+ lower = str.casefold
+
+ normed_value = lower(normed_value)
+ normed_choices = {
+ lower(normed_choice): original
+ for normed_choice, original in normed_choices.items()
+ }
+
+ if normed_value in normed_choices:
+ return normed_choices[normed_value]
+
+ self.fail(
+ "invalid choice: {}. (choose from {})".format(
+ value, ", ".join(self.choices)
+ ),
+ param,
+ ctx,
+ )
+
+ def __repr__(self):
+ return "Choice('{}')".format(list(self.choices))
+
+
+class DateTime(ParamType):
+ """The DateTime type converts date strings into `datetime` objects.
+
+ The format strings which are checked are configurable, but default to some
+ common (non-timezone aware) ISO 8601 formats.
+
+ When specifying *DateTime* formats, you should only pass a list or a tuple.
+ Other iterables, like generators, may lead to surprising results.
+
+ The format strings are processed using ``datetime.strptime``, and this
+ consequently defines the format strings which are allowed.
+
+ Parsing is tried using each format, in order, and the first format which
+ parses successfully is used.
+
+ :param formats: A list or tuple of date format strings, in the order in
+ which they should be tried. Defaults to
+ ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``,
+ ``'%Y-%m-%d %H:%M:%S'``.
+ """
+
+ name = "datetime"
+
+ def __init__(self, formats=None):
+ self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"]
+
+ def get_metavar(self, param):
+ return "[{}]".format("|".join(self.formats))
+
+ def _try_to_convert_date(self, value, format):
+ try:
+ return datetime.strptime(value, format)
+ except ValueError:
+ return None
+
+ def convert(self, value, param, ctx):
+ # Exact match
+ for format in self.formats:
+ dtime = self._try_to_convert_date(value, format)
+ if dtime:
+ return dtime
+
+ self.fail(
+ "invalid datetime format: {}. (choose from {})".format(
+ value, ", ".join(self.formats)
+ )
+ )
+
+ def __repr__(self):
+ return "DateTime"
+
+
+class IntParamType(ParamType):
+ name = "integer"
+
+ def convert(self, value, param, ctx):
+ try:
+ return int(value)
+ except ValueError:
+ self.fail("{} is not a valid integer".format(value), param, ctx)
+
+ def __repr__(self):
+ return "INT"
+
+
+class IntRange(IntParamType):
+ """A parameter that works similar to :data:`click.INT` but restricts
+ the value to fit into a range. The default behavior is to fail if the
+ value falls outside the range, but it can also be silently clamped
+ between the two edges.
+
+ See :ref:`ranges` for an example.
+ """
+
+ name = "integer range"
+
+ def __init__(self, min=None, max=None, clamp=False):
+ self.min = min
+ self.max = max
+ self.clamp = clamp
+
+ def convert(self, value, param, ctx):
+ rv = IntParamType.convert(self, value, param, ctx)
+ if self.clamp:
+ if self.min is not None and rv < self.min:
+ return self.min
+ if self.max is not None and rv > self.max:
+ return self.max
+ if (
+ self.min is not None
+ and rv < self.min
+ or self.max is not None
+ and rv > self.max
+ ):
+ if self.min is None:
+ self.fail(
+ "{} is bigger than the maximum valid value {}.".format(
+ rv, self.max
+ ),
+ param,
+ ctx,
+ )
+ elif self.max is None:
+ self.fail(
+ "{} is smaller than the minimum valid value {}.".format(
+ rv, self.min
+ ),
+ param,
+ ctx,
+ )
+ else:
+ self.fail(
+ "{} is not in the valid range of {} to {}.".format(
+ rv, self.min, self.max
+ ),
+ param,
+ ctx,
+ )
+ return rv
+
+ def __repr__(self):
+ return "IntRange({}, {})".format(self.min, self.max)
+
+
+class FloatParamType(ParamType):
+ name = "float"
+
+ def convert(self, value, param, ctx):
+ try:
+ return float(value)
+ except ValueError:
+ self.fail(
+ "{} is not a valid floating point value".format(value), param, ctx
+ )
+
+ def __repr__(self):
+ return "FLOAT"
+
+
+class FloatRange(FloatParamType):
+ """A parameter that works similar to :data:`click.FLOAT` but restricts
+ the value to fit into a range. The default behavior is to fail if the
+ value falls outside the range, but it can also be silently clamped
+ between the two edges.
+
+ See :ref:`ranges` for an example.
+ """
+
+ name = "float range"
+
+ def __init__(self, min=None, max=None, clamp=False):
+ self.min = min
+ self.max = max
+ self.clamp = clamp
+
+ def convert(self, value, param, ctx):
+ rv = FloatParamType.convert(self, value, param, ctx)
+ if self.clamp:
+ if self.min is not None and rv < self.min:
+ return self.min
+ if self.max is not None and rv > self.max:
+ return self.max
+ if (
+ self.min is not None
+ and rv < self.min
+ or self.max is not None
+ and rv > self.max
+ ):
+ if self.min is None:
+ self.fail(
+ "{} is bigger than the maximum valid value {}.".format(
+ rv, self.max
+ ),
+ param,
+ ctx,
+ )
+ elif self.max is None:
+ self.fail(
+ "{} is smaller than the minimum valid value {}.".format(
+ rv, self.min
+ ),
+ param,
+ ctx,
+ )
+ else:
+ self.fail(
+ "{} is not in the valid range of {} to {}.".format(
+ rv, self.min, self.max
+ ),
+ param,
+ ctx,
+ )
+ return rv
+
+ def __repr__(self):
+ return "FloatRange({}, {})".format(self.min, self.max)
+
+
+class BoolParamType(ParamType):
+ name = "boolean"
+
+ def convert(self, value, param, ctx):
+ if isinstance(value, bool):
+ return bool(value)
+ value = value.lower()
+ if value in ("true", "t", "1", "yes", "y"):
+ return True
+ elif value in ("false", "f", "0", "no", "n"):
+ return False
+ self.fail("{} is not a valid boolean".format(value), param, ctx)
+
+ def __repr__(self):
+ return "BOOL"
+
+
+class UUIDParameterType(ParamType):
+ name = "uuid"
+
+ def convert(self, value, param, ctx):
+ import uuid
+
+ try:
+ if PY2 and isinstance(value, text_type):
+ value = value.encode("ascii")
+ return uuid.UUID(value)
+ except ValueError:
+ self.fail("{} is not a valid UUID value".format(value), param, ctx)
+
+ def __repr__(self):
+ return "UUID"
+
+
+class File(ParamType):
+ """Declares a parameter to be a file for reading or writing. The file
+ is automatically closed once the context tears down (after the command
+ finished working).
+
+ Files can be opened for reading or writing. The special value ``-``
+ indicates stdin or stdout depending on the mode.
+
+ By default, the file is opened for reading text data, but it can also be
+ opened in binary mode or for writing. The encoding parameter can be used
+ to force a specific encoding.
+
+ The `lazy` flag controls if the file should be opened immediately or upon
+ first IO. The default is to be non-lazy for standard input and output
+ streams as well as files opened for reading, `lazy` otherwise. When opening a
+ file lazily for reading, it is still opened temporarily for validation, but
+ will not be held open until first IO. lazy is mainly useful when opening
+ for writing to avoid creating the file until it is needed.
+
+ Starting with Click 2.0, files can also be opened atomically in which
+ case all writes go into a separate file in the same folder and upon
+ completion the file will be moved over to the original location. This
+ is useful if a file regularly read by other users is modified.
+
+ See :ref:`file-args` for more information.
+ """
+
+ name = "filename"
+ envvar_list_splitter = os.path.pathsep
+
+ def __init__(
+ self, mode="r", encoding=None, errors="strict", lazy=None, atomic=False
+ ):
+ self.mode = mode
+ self.encoding = encoding
+ self.errors = errors
+ self.lazy = lazy
+ self.atomic = atomic
+
+ def resolve_lazy_flag(self, value):
+ if self.lazy is not None:
+ return self.lazy
+ if value == "-":
+ return False
+ elif "w" in self.mode:
+ return True
+ return False
+
+ def convert(self, value, param, ctx):
+ try:
+ if hasattr(value, "read") or hasattr(value, "write"):
+ return value
+
+ lazy = self.resolve_lazy_flag(value)
+
+ if lazy:
+ f = LazyFile(
+ value, self.mode, self.encoding, self.errors, atomic=self.atomic
+ )
+ if ctx is not None:
+ ctx.call_on_close(f.close_intelligently)
+ return f
+
+ f, should_close = open_stream(
+ value, self.mode, self.encoding, self.errors, atomic=self.atomic
+ )
+ # If a context is provided, we automatically close the file
+ # at the end of the context execution (or flush out). If a
+ # context does not exist, it's the caller's responsibility to
+ # properly close the file. This for instance happens when the
+ # type is used with prompts.
+ if ctx is not None:
+ if should_close:
+ ctx.call_on_close(safecall(f.close))
+ else:
+ ctx.call_on_close(safecall(f.flush))
+ return f
+ except (IOError, OSError) as e: # noqa: B014
+ self.fail(
+ "Could not open file: {}: {}".format(
+ filename_to_ui(value), get_streerror(e)
+ ),
+ param,
+ ctx,
+ )
+
+
+class Path(ParamType):
+ """The path type is similar to the :class:`File` type but it performs
+ different checks. First of all, instead of returning an open file
+ handle it returns just the filename. Secondly, it can perform various
+ basic checks about what the file or directory should be.
+
+ .. versionchanged:: 6.0
+ `allow_dash` was added.
+
+ :param exists: if set to true, the file or directory needs to exist for
+ this value to be valid. If this is not required and a
+ file does indeed not exist, then all further checks are
+ silently skipped.
+ :param file_okay: controls if a file is a possible value.
+ :param dir_okay: controls if a directory is a possible value.
+ :param writable: if true, a writable check is performed.
+ :param readable: if true, a readable check is performed.
+ :param resolve_path: if this is true, then the path is fully resolved
+ before the value is passed onwards. This means
+ that it's absolute and symlinks are resolved. It
+ will not expand a tilde-prefix, as this is
+ supposed to be done by the shell only.
+ :param allow_dash: If this is set to `True`, a single dash to indicate
+ standard streams is permitted.
+ :param path_type: optionally a string type that should be used to
+ represent the path. The default is `None` which
+ means the return value will be either bytes or
+ unicode depending on what makes most sense given the
+ input data Click deals with.
+ """
+
+ envvar_list_splitter = os.path.pathsep
+
+ def __init__(
+ self,
+ exists=False,
+ file_okay=True,
+ dir_okay=True,
+ writable=False,
+ readable=True,
+ resolve_path=False,
+ allow_dash=False,
+ path_type=None,
+ ):
+ self.exists = exists
+ self.file_okay = file_okay
+ self.dir_okay = dir_okay
+ self.writable = writable
+ self.readable = readable
+ self.resolve_path = resolve_path
+ self.allow_dash = allow_dash
+ self.type = path_type
+
+ if self.file_okay and not self.dir_okay:
+ self.name = "file"
+ self.path_type = "File"
+ elif self.dir_okay and not self.file_okay:
+ self.name = "directory"
+ self.path_type = "Directory"
+ else:
+ self.name = "path"
+ self.path_type = "Path"
+
+ def coerce_path_result(self, rv):
+ if self.type is not None and not isinstance(rv, self.type):
+ if self.type is text_type:
+ rv = rv.decode(get_filesystem_encoding())
+ else:
+ rv = rv.encode(get_filesystem_encoding())
+ return rv
+
+ def convert(self, value, param, ctx):
+ rv = value
+
+ is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-")
+
+ if not is_dash:
+ if self.resolve_path:
+ rv = os.path.realpath(rv)
+
+ try:
+ st = os.stat(rv)
+ except OSError:
+ if not self.exists:
+ return self.coerce_path_result(rv)
+ self.fail(
+ "{} '{}' does not exist.".format(
+ self.path_type, filename_to_ui(value)
+ ),
+ param,
+ ctx,
+ )
+
+ if not self.file_okay and stat.S_ISREG(st.st_mode):
+ self.fail(
+ "{} '{}' is a file.".format(self.path_type, filename_to_ui(value)),
+ param,
+ ctx,
+ )
+ if not self.dir_okay and stat.S_ISDIR(st.st_mode):
+ self.fail(
+ "{} '{}' is a directory.".format(
+ self.path_type, filename_to_ui(value)
+ ),
+ param,
+ ctx,
+ )
+ if self.writable and not os.access(value, os.W_OK):
+ self.fail(
+ "{} '{}' is not writable.".format(
+ self.path_type, filename_to_ui(value)
+ ),
+ param,
+ ctx,
+ )
+ if self.readable and not os.access(value, os.R_OK):
+ self.fail(
+ "{} '{}' is not readable.".format(
+ self.path_type, filename_to_ui(value)
+ ),
+ param,
+ ctx,
+ )
+
+ return self.coerce_path_result(rv)
+
+
+class Tuple(CompositeParamType):
+ """The default behavior of Click is to apply a type on a value directly.
+ This works well in most cases, except for when `nargs` is set to a fixed
+ count and different types should be used for different items. In this
+ case the :class:`Tuple` type can be used. This type can only be used
+ if `nargs` is set to a fixed number.
+
+ For more information see :ref:`tuple-type`.
+
+ This can be selected by using a Python tuple literal as a type.
+
+ :param types: a list of types that should be used for the tuple items.
+ """
+
+ def __init__(self, types):
+ self.types = [convert_type(ty) for ty in types]
+
+ @property
+ def name(self):
+ return "<{}>".format(" ".join(ty.name for ty in self.types))
+
+ @property
+ def arity(self):
+ return len(self.types)
+
+ def convert(self, value, param, ctx):
+ if len(value) != len(self.types):
+ raise TypeError(
+ "It would appear that nargs is set to conflict with the"
+ " composite type arity."
+ )
+ return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value))
+
+
+def convert_type(ty, default=None):
+ """Converts a callable or python type into the most appropriate
+ param type.
+ """
+ guessed_type = False
+ if ty is None and default is not None:
+ if isinstance(default, tuple):
+ ty = tuple(map(type, default))
+ else:
+ ty = type(default)
+ guessed_type = True
+
+ if isinstance(ty, tuple):
+ return Tuple(ty)
+ if isinstance(ty, ParamType):
+ return ty
+ if ty is text_type or ty is str or ty is None:
+ return STRING
+ if ty is int:
+ return INT
+ # Booleans are only okay if not guessed. This is done because for
+ # flags the default value is actually a bit of a lie in that it
+ # indicates which of the flags is the one we want. See get_default()
+ # for more information.
+ if ty is bool and not guessed_type:
+ return BOOL
+ if ty is float:
+ return FLOAT
+ if guessed_type:
+ return STRING
+
+ # Catch a common mistake
+ if __debug__:
+ try:
+ if issubclass(ty, ParamType):
+ raise AssertionError(
+ "Attempted to use an uninstantiated parameter type ({}).".format(ty)
+ )
+ except TypeError:
+ pass
+ return FuncParamType(ty)
+
+
+#: A dummy parameter type that just does nothing. From a user's
+#: perspective this appears to just be the same as `STRING` but internally
+#: no string conversion takes place. This is necessary to achieve the
+#: same bytes/unicode behavior on Python 2/3 in situations where you want
+#: to not convert argument types. This is usually useful when working
+#: with file paths as they can appear in bytes and unicode.
+#:
+#: For path related uses the :class:`Path` type is a better choice but
+#: there are situations where an unprocessed type is useful which is why
+#: it is provided.
+#:
+#: .. versionadded:: 4.0
+UNPROCESSED = UnprocessedParamType()
+
+#: A unicode string parameter type which is the implicit default. This
+#: can also be selected by using ``str`` as type.
+STRING = StringParamType()
+
+#: An integer parameter. This can also be selected by using ``int`` as
+#: type.
+INT = IntParamType()
+
+#: A floating point value parameter. This can also be selected by using
+#: ``float`` as type.
+FLOAT = FloatParamType()
+
+#: A boolean parameter. This is the default for boolean flags. This can
+#: also be selected by using ``bool`` as a type.
+BOOL = BoolParamType()
+
+#: A UUID parameter.
+UUID = UUIDParameterType()
diff --git a/matteo_env/Lib/site-packages/click/utils.py b/matteo_env/Lib/site-packages/click/utils.py
new file mode 100644
index 0000000..79265e7
--- /dev/null
+++ b/matteo_env/Lib/site-packages/click/utils.py
@@ -0,0 +1,455 @@
+import os
+import sys
+
+from ._compat import _default_text_stderr
+from ._compat import _default_text_stdout
+from ._compat import auto_wrap_for_ansi
+from ._compat import binary_streams
+from ._compat import filename_to_ui
+from ._compat import get_filesystem_encoding
+from ._compat import get_streerror
+from ._compat import is_bytes
+from ._compat import open_stream
+from ._compat import PY2
+from ._compat import should_strip_ansi
+from ._compat import string_types
+from ._compat import strip_ansi
+from ._compat import text_streams
+from ._compat import text_type
+from ._compat import WIN
+from .globals import resolve_color_default
+
+if not PY2:
+ from ._compat import _find_binary_writer
+elif WIN:
+ from ._winconsole import _get_windows_argv
+ from ._winconsole import _hash_py_argv
+ from ._winconsole import _initial_argv_hash
+
# Types that echo() hands to the stream without conversion; everything
# else is coerced with text_type() first (``string_types`` is imported
# from ._compat above).
echo_native_types = string_types + (bytes, bytearray)
+
+
+def _posixify(name):
+ return "-".join(name.split()).lower()
+
+
def safecall(func):
    """Wrap *func* so that any exception it raises is swallowed.

    The wrapped call returns ``None`` whenever *func* raises.
    """

    def _swallowing(*args, **kwargs):
        try:
            result = func(*args, **kwargs)
        except Exception:
            return None
        return result

    return _swallowing
+
+
def make_str(value):
    """Converts a value into a valid string.

    Bytes are decoded with the filesystem encoding (falling back to
    UTF-8 with replacement on failure); everything else goes through
    ``text_type``.
    """
    if not isinstance(value, bytes):
        return text_type(value)
    try:
        return value.decode(get_filesystem_encoding())
    except UnicodeError:
        return value.decode("utf-8", "replace")
+
+
def make_default_short_help(help, max_length=45):
    """Return a condensed version of help string.

    Takes words until either a sentence ends (a word ending in ``.``)
    or adding the next word would exceed *max_length*, in which case
    ``...`` is appended instead.
    """
    pieces = []
    used = 0

    for word in help.split():
        ends_sentence = word.endswith(".")
        # A space is needed in front of every word but the first.
        cost = len(word) + (1 if pieces else 0)
        if used + cost > max_length:
            pieces.append("...")
            break
        if pieces:
            pieces.append(" ")
        pieces.append(word)
        if ends_sentence:
            break
        used += cost

    return "".join(pieces)
+
+
class LazyFile(object):
    """A lazy file works like a regular file but it does not fully open
    the file but it does perform some basic checks early to see if the
    filename parameter does make sense. This is useful for safely opening
    files for writing.
    """

    def __init__(
        self, filename, mode="r", encoding=None, errors="strict", atomic=False
    ):
        self.name = filename
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        self.atomic = atomic

        if filename == "-":
            # stdin/stdout: open immediately, the stream already exists.
            self._f, self.should_close = open_stream(filename, mode, encoding, errors)
        else:
            if "r" in mode:
                # Open and close the file in case we're opening it for
                # reading so that we can catch at least some errors in
                # some cases early.
                open(filename, mode).close()
            self._f = None
            self.should_close = True

    def __getattr__(self, name):
        # Delegate everything else to the (lazily opened) real file.
        return getattr(self.open(), name)

    def __repr__(self):
        if self._f is not None:
            return repr(self._f)
        # Bug fix: the format template was missing ("".format(...) always
        # produced an empty string); restore the informative repr.
        return "<unopened file '{}' mode '{}'>".format(self.name, self.mode)

    def open(self):
        """Opens the file if it's not yet open. This call might fail with
        a :exc:`FileError`. Not handling this error will produce an error
        that Click shows.
        """
        if self._f is not None:
            return self._f
        try:
            rv, self.should_close = open_stream(
                self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
            )
        except (IOError, OSError) as e:  # noqa: E402
            from .exceptions import FileError

            raise FileError(self.name, hint=get_streerror(e))
        self._f = rv
        return rv

    def close(self):
        """Closes the underlying file, no matter what."""
        if self._f is not None:
            self._f.close()

    def close_intelligently(self):
        """This function only closes the file if it was opened by the lazy
        file wrapper. For instance this will never close stdin.
        """
        if self.should_close:
            self.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close_intelligently()

    def __iter__(self):
        self.open()
        return iter(self._f)
+
+
class KeepOpenFile(object):
    """Transparent proxy around an open file whose context-manager exit
    deliberately does *not* close it (used so standard streams survive a
    ``with`` block)."""

    def __init__(self, file):
        self._file = file

    def __getattr__(self, name):
        # Forward every other attribute to the wrapped file.
        return getattr(self._file, name)

    def __iter__(self):
        return iter(self._file)

    def __repr__(self):
        return repr(self._file)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # Intentionally do not close the underlying file.
        pass
+
+
def echo(message=None, file=None, nl=True, err=False, color=None):
    """Prints a message plus a newline to the given file or stdout. On
    first sight, this looks like the print function, but it has improved
    support for handling Unicode and binary data that does not fail no
    matter how badly configured the system is.

    Primarily it means that you can print binary data as well as Unicode
    data on both 2.x and 3.x to the given file in the most appropriate way
    possible. This is a very carefree function in that it will try its
    best to not fail. As of Click 6.0 this includes support for unicode
    output on the Windows console.

    In addition to that, if `colorama`_ is installed, the echo function will
    also support clever handling of ANSI codes. Essentially it will then
    do the following:

    - add transparent handling of ANSI color codes on Windows.
    - hide ANSI codes automatically if the destination file is not a
      terminal.

    .. _colorama: https://pypi.org/project/colorama/

    .. versionchanged:: 6.0
       As of Click 6.0 the echo function will properly support unicode
       output on the windows console. Note that click does not modify
       the interpreter in any way which means that `sys.stdout` or the
       print statement or function will still not provide unicode support.

    .. versionchanged:: 2.0
       Starting with version 2.0 of Click, the echo function will work
       with colorama if it's installed.

    .. versionadded:: 3.0
       The `err` parameter was added.

    .. versionchanged:: 4.0
       Added the `color` flag.

    :param message: the message to print
    :param file: the file to write to (defaults to ``stdout``)
    :param err: if set to true the file defaults to ``stderr`` instead of
                ``stdout``. This is faster and easier than calling
                :func:`get_text_stderr` yourself.
    :param nl: if set to `True` (the default) a newline is printed afterwards.
    :param color: controls if the terminal supports ANSI colors or not. The
                  default is autodetection.
    """
    if file is None:
        if err:
            file = _default_text_stderr()
        else:
            file = _default_text_stdout()

    # Convert non bytes/text into the native string type.
    if message is not None and not isinstance(message, echo_native_types):
        message = text_type(message)

    if nl:
        # ``message or u""`` also turns a None message into an empty
        # string so a bare echo() still emits the newline.
        message = message or u""
        if isinstance(message, text_type):
            message += u"\n"
        else:
            message += b"\n"

    # If there is a message, and we're in Python 3, and the value looks
    # like bytes, we manually need to find the binary stream and write the
    # message in there. This is done separately so that most stream
    # types will work as you would expect. Eg: you can write to StringIO
    # for other cases.
    if message and not PY2 and is_bytes(message):
        binary_file = _find_binary_writer(file)
        if binary_file is not None:
            file.flush()
            binary_file.write(message)
            binary_file.flush()
            return

    # ANSI-style support. If there is no message or we are dealing with
    # bytes nothing is happening. If we are connected to a file we want
    # to strip colors. If we are on windows we either wrap the stream
    # to strip the color or we use the colorama support to translate the
    # ansi codes to API calls.
    if message and not is_bytes(message):
        color = resolve_color_default(color)
        if should_strip_ansi(file, color):
            message = strip_ansi(message)
        elif WIN:
            if auto_wrap_for_ansi is not None:
                file = auto_wrap_for_ansi(file)
            elif not color:
                message = strip_ansi(message)

    if message:
        file.write(message)
        file.flush()
+
+
def get_binary_stream(name):
    """Returns a system stream for byte processing. This essentially
    returns the stream from the sys module with the given name but it
    solves some compatibility issues between different Python versions.
    Primarily this function is necessary for getting binary streams on
    Python 3.

    :param name: the name of the stream to open. Valid names are ``'stdin'``,
                 ``'stdout'`` and ``'stderr'``
    """
    try:
        opener = binary_streams[name]
    except KeyError:
        raise TypeError("Unknown standard stream '{}'".format(name))
    return opener()
+
+
def get_text_stream(name, encoding=None, errors="strict"):
    """Returns a system stream for text processing. This usually returns
    a wrapped stream around a binary stream returned from
    :func:`get_binary_stream` but it also can take shortcuts on Python 3
    for already correctly configured streams.

    :param name: the name of the stream to open. Valid names are ``'stdin'``,
                 ``'stdout'`` and ``'stderr'``
    :param encoding: overrides the detected default encoding.
    :param errors: overrides the default error mode.
    """
    try:
        opener = text_streams[name]
    except KeyError:
        raise TypeError("Unknown standard stream '{}'".format(name))
    return opener(encoding, errors)
+
+
def open_file(
    filename, mode="r", encoding=None, errors="strict", lazy=False, atomic=False
):
    """This is similar to how the :class:`File` works but for manual
    usage. Files are opened non lazy by default. This can open regular
    files as well as stdin/stdout if ``'-'`` is passed.

    If stdin/stdout is returned the stream is wrapped so that the context
    manager will not close the stream accidentally. This makes it possible
    to always use the function like this without having to worry to
    accidentally close a standard stream::

        with open_file(filename) as f:
            ...

    .. versionadded:: 3.0

    :param filename: the name of the file to open (or ``'-'`` for stdin/stdout).
    :param mode: the mode in which to open the file.
    :param encoding: the encoding to use.
    :param errors: the error handling for this file.
    :param lazy: can be flipped to true to open the file lazily.
    :param atomic: in atomic mode writes go into a temporary file and it's
                   moved on close.
    """
    if lazy:
        return LazyFile(filename, mode, encoding, errors, atomic=atomic)
    stream, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
    # Standard streams must survive the caller's ``with`` block.
    return stream if should_close else KeepOpenFile(stream)
+
+
def get_os_args():
    """This returns the argument part of sys.argv in the most appropriate
    form for processing. What this means is that this return value is in
    a format that works for Click to process but does not necessarily
    correspond well to what's actually standard for the interpreter.

    On most environments the return value is ``sys.argv[1:]`` unchanged.
    However if you are on Windows and running Python 2 the return value
    will actually be a list of unicode strings instead because the
    default behavior on that platform otherwise will not be able to
    carry all possible values that sys.argv can have.

    .. versionadded:: 6.0
    """
    # We can only extract the unicode argv if sys.argv has not been
    # changed since the startup of the application.
    if PY2 and WIN and _initial_argv_hash == _hash_py_argv():
        return _get_windows_argv()
    return sys.argv[1:]
+
+
def format_filename(filename, shorten=False):
    """Formats a filename for user display. The main purpose of this
    function is to ensure that the filename can be displayed at all. This
    will decode the filename to unicode if necessary in a way that it will
    not fail. Optionally, it can shorten the filename to not include the
    full path to the filename.

    :param filename: formats a filename for UI display. This will also convert
                     the filename into unicode without failing.
    :param shorten: this optionally shortens the filename to strip of the
                    path that leads up to it.
    """
    shown = os.path.basename(filename) if shorten else filename
    return filename_to_ui(shown)
+
+
def get_app_dir(app_name, roaming=True, force_posix=False):
    r"""Returns the config folder for the application. The default behavior
    is to return whatever is most appropriate for the operating system.

    To give you an idea, for an app called ``"Foo Bar"``, something like
    the following folders could be returned:

    Mac OS X:
      ``~/Library/Application Support/Foo Bar``
    Mac OS X (POSIX):
      ``~/.foo-bar``
    Unix:
      ``~/.config/foo-bar``
    Unix (POSIX):
      ``~/.foo-bar``
    Win XP (roaming):
      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
    Win XP (not roaming):
      ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
    Win 7 (roaming):
      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
    Win 7 (not roaming):
      ``C:\Users\<user>\AppData\Local\Foo Bar``

    .. versionadded:: 2.0

    :param app_name: the application name. This should be properly capitalized
                     and can contain whitespace.
    :param roaming: controls if the folder should be roaming or not on Windows.
                    Has no affect otherwise.
    :param force_posix: if this is set to `True` then on any POSIX system the
                        folder will be stored in the home folder with a leading
                        dot instead of the XDG config home or darwin's
                        application support folder.
    """
    if WIN:
        env_key = "APPDATA" if roaming else "LOCALAPPDATA"
        base = os.environ.get(env_key)
        if base is None:
            base = os.path.expanduser("~")
        return os.path.join(base, app_name)

    if force_posix:
        # Hidden dot-folder directly in the home directory.
        return os.path.join(os.path.expanduser("~/.{}".format(_posixify(app_name))))

    if sys.platform == "darwin":
        return os.path.join(
            os.path.expanduser("~/Library/Application Support"), app_name
        )

    # Generic Unix: honor XDG_CONFIG_HOME, defaulting to ~/.config.
    xdg_home = os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
    return os.path.join(xdg_home, _posixify(app_name))
+
+
class PacifyFlushWrapper(object):
    """This wrapper is used to catch and suppress BrokenPipeErrors resulting
    from ``.flush()`` being called on broken pipe during the shutdown/final-GC
    of the Python interpreter. Notably ``.flush()`` is always called on
    ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
    other cleanup code, and the case where the underlying file is not a broken
    pipe, all calls and attributes are proxied.
    """

    def __init__(self, wrapped):
        self.wrapped = wrapped

    def flush(self):
        import errno

        try:
            self.wrapped.flush()
        except IOError as e:
            # Only a broken pipe is pacified; anything else propagates.
            if e.errno != errno.EPIPE:
                raise

    def __getattr__(self, attr):
        return getattr(self.wrapped, attr)
diff --git a/matteo_env/Lib/site-packages/engineio/__init__.py b/matteo_env/Lib/site-packages/engineio/__init__.py
new file mode 100644
index 0000000..eb9f5fe
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/__init__.py
@@ -0,0 +1,25 @@
import sys

from .client import Client
from .middleware import WSGIApp, Middleware
from .server import Server

if sys.version_info >= (3, 5):  # pragma: no cover
    from .asyncio_server import AsyncServer
    from .asyncio_client import AsyncClient
    from .async_drivers.asgi import ASGIApp
    try:
        from .async_drivers.tornado import get_tornado_handler
    except ImportError:
        get_tornado_handler = None
else:  # pragma: no cover
    # Async support requires Python 3.5+; export placeholders instead.
    AsyncServer = None
    AsyncClient = None
    get_tornado_handler = None
    ASGIApp = None

__version__ = '4.0.0'

__all__ = ['__version__', 'Server', 'WSGIApp', 'Middleware', 'Client']
if AsyncServer is not None:  # pragma: no cover
    # Bug fix: the original statement ended with a stray trailing comma,
    # turning the right-hand side into a one-element tuple so the *list
    # object itself* was appended to __all__ instead of the four names.
    __all__ += ['AsyncServer', 'ASGIApp', 'get_tornado_handler',
                'AsyncClient']
diff --git a/matteo_env/Lib/site-packages/engineio/async_drivers/__init__.py b/matteo_env/Lib/site-packages/engineio/async_drivers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/matteo_env/Lib/site-packages/engineio/async_drivers/aiohttp.py b/matteo_env/Lib/site-packages/engineio/async_drivers/aiohttp.py
new file mode 100644
index 0000000..a591995
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/async_drivers/aiohttp.py
@@ -0,0 +1,127 @@
+import asyncio
+import sys
+from urllib.parse import urlsplit
+
+from aiohttp.web import Response, WebSocketResponse
+
+
def create_route(app, engineio_server, engineio_endpoint):
    """This function sets up the engine.io endpoint as a route for the
    application.

    Note that both GET and POST requests must be hooked up on the engine.io
    endpoint.
    """
    handler = engineio_server.handle_request
    app.router.add_get(engineio_endpoint, handler)
    app.router.add_post(engineio_endpoint, handler)
    app.router.add_route('OPTIONS', engineio_endpoint, handler)
+
+
def translate_request(request):
    """This function takes the arguments passed to the request handler and
    uses them to generate a WSGI compatible environ dictionary.
    """
    message = request._message
    payload = request._payload
    split_uri = urlsplit(message.path)

    environ = {
        'wsgi.input': payload,
        'wsgi.errors': sys.stderr,
        'wsgi.version': (1, 0),
        'wsgi.async': True,
        'wsgi.multithread': False,
        'wsgi.multiprocess': False,
        'wsgi.run_once': False,
        'SERVER_SOFTWARE': 'aiohttp',
        'REQUEST_METHOD': message.method,
        'QUERY_STRING': split_uri.query or '',
        'RAW_URI': message.path,
        'SERVER_PROTOCOL': 'HTTP/%s.%s' % message.version,
        'REMOTE_ADDR': '127.0.0.1',
        'REMOTE_PORT': '0',
        'SERVER_NAME': 'aiohttp',
        'SERVER_PORT': '0',
        'aiohttp.request': request,
    }

    for name, value in message.headers.items():
        name = name.upper()
        if name == 'CONTENT-TYPE':
            environ['CONTENT_TYPE'] = value
        elif name == 'CONTENT-LENGTH':
            environ['CONTENT_LENGTH'] = value
        else:
            key = 'HTTP_%s' % name.replace('-', '_')
            if key in environ:
                # Repeated headers are folded into a comma-separated value.
                value = '%s,%s' % (environ[key], value)
            environ[key] = value

    environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
    environ['PATH_INFO'] = split_uri.path
    environ['SCRIPT_NAME'] = ''
    return environ
+
+
def make_response(status, headers, payload, environ):
    """This function generates an appropriate response object for this async
    mode.
    """
    # The status line looks like "200 OK"; only the code is needed here.
    status_code = int(status.split()[0])
    return Response(body=payload, status=status_code, headers=headers)
+
+
+class WebSocket(object): # pragma: no cover
+ """
+ This wrapper class provides a aiohttp WebSocket interface that is
+ somewhat compatible with eventlet's implementation.
+ """
+ def __init__(self, handler):
+ self.handler = handler
+ self._sock = None
+
+ async def __call__(self, environ):
+ request = environ['aiohttp.request']
+ self._sock = WebSocketResponse()
+ await self._sock.prepare(request)
+
+ self.environ = environ
+ await self.handler(self)
+ return self._sock
+
+ async def close(self):
+ await self._sock.close()
+
+ async def send(self, message):
+ if isinstance(message, bytes):
+ f = self._sock.send_bytes
+ else:
+ f = self._sock.send_str
+ if asyncio.iscoroutinefunction(f):
+ await f(message)
+ else:
+ f(message)
+
+ async def wait(self):
+ msg = await self._sock.receive()
+ if not isinstance(msg.data, bytes) and \
+ not isinstance(msg.data, str):
+ raise IOError()
+ return msg.data
+
+
# Driver registry: maps capability names to this module's implementations
# so they can be looked up by name.
_async = {
    'asyncio': True,
    'create_route': create_route,
    'translate_request': translate_request,
    'make_response': make_response,
    'websocket': WebSocket,
}
diff --git a/matteo_env/Lib/site-packages/engineio/async_drivers/asgi.py b/matteo_env/Lib/site-packages/engineio/async_drivers/asgi.py
new file mode 100644
index 0000000..fe94e59
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/async_drivers/asgi.py
@@ -0,0 +1,250 @@
+import os
+import sys
+import asyncio
+
+from engineio.static_files import get_static_file
+
+
class ASGIApp:
    """ASGI application middleware for Engine.IO.

    This middleware dispatches traffic to an Engine.IO application. It can
    also serve a list of static files to the client, or forward unrelated
    HTTP traffic to another ASGI application.

    :param engineio_server: The Engine.IO server. Must be an instance of the
                            ``engineio.AsyncServer`` class.
    :param static_files: A dictionary with static file mapping rules. See the
                         documentation for details on this argument.
    :param other_asgi_app: A separate ASGI app that receives all other traffic.
    :param engineio_path: The endpoint where the Engine.IO application should
                          be installed. The default value is appropriate for
                          most cases.
    :param on_startup: function to be called on application startup; can be
                       coroutine
    :param on_shutdown: function to be called on application shutdown; can be
                        coroutine

    Example usage::

        import engineio
        import uvicorn

        eio = engineio.AsyncServer()
        app = engineio.ASGIApp(eio, static_files={
            '/': {'content_type': 'text/html', 'filename': 'index.html'},
            '/index.html': {'content_type': 'text/html',
                            'filename': 'index.html'},
        })
        uvicorn.run(app, '127.0.0.1', 5000)
    """
    def __init__(self, engineio_server, other_asgi_app=None,
                 static_files=None, engineio_path='engine.io',
                 on_startup=None, on_shutdown=None):
        self.engineio_server = engineio_server
        self.other_asgi_app = other_asgi_app
        # Normalized without surrounding slashes; __call__ rebuilds them.
        self.engineio_path = engineio_path.strip('/')
        self.static_files = static_files or {}
        self.on_startup = on_startup
        self.on_shutdown = on_shutdown

    async def __call__(self, scope, receive, send):
        # Dispatch order: engine.io traffic first, then static files,
        # then the wrapped ASGI app, then lifespan events, else 404.
        if scope['type'] in ['http', 'websocket'] and \
                scope['path'].startswith('/{0}/'.format(self.engineio_path)):
            await self.engineio_server.handle_request(scope, receive, send)
        else:
            static_file = get_static_file(scope['path'], self.static_files) \
                if scope['type'] == 'http' and self.static_files else None
            if static_file:
                await self.serve_static_file(static_file, receive, send)
            elif self.other_asgi_app is not None:
                await self.other_asgi_app(scope, receive, send)
            elif scope['type'] == 'lifespan':
                await self.lifespan(receive, send)
            else:
                await self.not_found(receive, send)

    async def serve_static_file(self, static_file, receive,
                                send):  # pragma: no cover
        # Reads the whole file into memory; static_file carries
        # 'filename' and 'content_type' keys.
        event = await receive()
        if event['type'] == 'http.request':
            if os.path.exists(static_file['filename']):
                with open(static_file['filename'], 'rb') as f:
                    payload = f.read()
                await send({'type': 'http.response.start',
                            'status': 200,
                            'headers': [(b'Content-Type', static_file[
                                'content_type'].encode('utf-8'))]})
                await send({'type': 'http.response.body',
                            'body': payload})
            else:
                await self.not_found(receive, send)

    async def lifespan(self, receive, send):
        # Handles the ASGI lifespan protocol: run the user callbacks and
        # report success/failure back to the server.
        while True:
            event = await receive()
            if event['type'] == 'lifespan.startup':
                if self.on_startup:
                    try:
                        await self.on_startup() \
                            if asyncio.iscoroutinefunction(self.on_startup) \
                            else self.on_startup()
                    # NOTE(review): bare except also swallows
                    # KeyboardInterrupt/SystemExit — consider
                    # ``except Exception``.
                    except:
                        await send({'type': 'lifespan.startup.failed'})
                        return
                await send({'type': 'lifespan.startup.complete'})
            elif event['type'] == 'lifespan.shutdown':
                if self.on_shutdown:
                    try:
                        await self.on_shutdown() \
                            if asyncio.iscoroutinefunction(self.on_shutdown) \
                            else self.on_shutdown()
                    # NOTE(review): same bare-except caveat as above.
                    except:
                        await send({'type': 'lifespan.shutdown.failed'})
                        return
                await send({'type': 'lifespan.shutdown.complete'})
                return

    async def not_found(self, receive, send):
        """Return a 404 Not Found error to the client."""
        await send({'type': 'http.response.start',
                    'status': 404,
                    'headers': [(b'Content-Type', b'text/plain')]})
        await send({'type': 'http.response.body',
                    'body': b'Not Found'})
+
+
async def translate_request(scope, receive, send):
    """Build a WSGI-style environ dictionary from an ASGI scope plus the
    fully buffered request body. Returns ``{}`` for event types that are
    neither an HTTP request nor a websocket connect.
    """

    class AwaitablePayload(object):  # pragma: no cover
        """Minimal async file-like wrapper over the buffered body."""

        def __init__(self, payload):
            self.payload = payload or b''

        async def read(self, length=None):
            if length is None:
                data, self.payload = self.payload, b''
            else:
                data, self.payload = self.payload[:length], \
                    self.payload[length:]
            return data

    event = await receive()
    payload = b''
    if event['type'] == 'http.request':
        payload += event.get('body') or b''
        # Keep draining until the server signals the body is complete.
        while event.get('more_body'):
            event = await receive()
            if event['type'] == 'http.request':
                payload += event.get('body') or b''
    elif event['type'] != 'websocket.connect':
        return {}

    raw_uri = scope['path'].encode('utf-8')
    if scope.get('query_string'):
        raw_uri += b'?' + scope['query_string']

    environ = {
        'wsgi.input': AwaitablePayload(payload),
        'wsgi.errors': sys.stderr,
        'wsgi.version': (1, 0),
        'wsgi.async': True,
        'wsgi.multithread': False,
        'wsgi.multiprocess': False,
        'wsgi.run_once': False,
        'SERVER_SOFTWARE': 'asgi',
        'REQUEST_METHOD': scope.get('method', 'GET'),
        'PATH_INFO': scope['path'],
        'QUERY_STRING': scope.get('query_string', b'').decode('utf-8'),
        'RAW_URI': raw_uri.decode('utf-8'),
        'SCRIPT_NAME': '',
        'SERVER_PROTOCOL': 'HTTP/1.1',
        'REMOTE_ADDR': '127.0.0.1',
        'REMOTE_PORT': '0',
        'SERVER_NAME': 'asgi',
        'SERVER_PORT': '0',
        'asgi.receive': receive,
        'asgi.send': send,
        'asgi.scope': scope,
    }

    for raw_name, raw_value in scope['headers']:
        name = raw_name.upper().decode('utf-8')
        value = raw_value.decode('utf-8')
        if name == 'CONTENT-TYPE':
            environ['CONTENT_TYPE'] = value
        elif name == 'CONTENT-LENGTH':
            environ['CONTENT_LENGTH'] = value
        else:
            key = 'HTTP_%s' % name.replace('-', '_')
            if key in environ:
                # Repeated headers fold into a comma-separated value.
                value = '%s,%s' % (environ[key], value)
            environ[key] = value

    environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
    return environ
+
+
async def make_response(status, headers, payload, environ):
    """Send an HTTP response (or complete a WebSocket handshake) through
    the ASGI ``send`` callable stored in *environ*.
    """
    send = environ['asgi.send']
    encoded_headers = [(name.encode('utf-8'), value.encode('utf-8'))
                       for name, value in headers]

    if 'HTTP_SEC_WEBSOCKET_VERSION' in environ:
        # WebSocket handshake: accept on a 200 status, close otherwise.
        if status.startswith('200 '):
            await send({'type': 'websocket.accept',
                        'headers': encoded_headers})
        else:
            await send({'type': 'websocket.close'})
        return

    await send({'type': 'http.response.start',
                'status': int(status.split(' ')[0]),
                'headers': encoded_headers})
    await send({'type': 'http.response.body',
                'body': payload})
+
+
class WebSocket(object):  # pragma: no cover
    """
    This wrapper class provides an asgi WebSocket interface that is
    somewhat compatible with eventlet's implementation.
    """

    def __init__(self, handler):
        self.handler = handler
        self.asgi_receive = None
        self.asgi_send = None

    async def __call__(self, environ):
        self.asgi_receive = environ['asgi.receive']
        self.asgi_send = environ['asgi.send']
        # Accept the handshake, then hand control to the app handler.
        await self.asgi_send({'type': 'websocket.accept'})
        await self.handler(self)

    async def close(self):
        await self.asgi_send({'type': 'websocket.close'})

    async def send(self, message):
        is_binary = isinstance(message, bytes)
        await self.asgi_send({
            'type': 'websocket.send',
            'bytes': message if is_binary else None,
            'text': None if is_binary else message,
        })

    async def wait(self):
        event = await self.asgi_receive()
        if event['type'] != 'websocket.receive':
            # Disconnects and other events surface as an I/O error.
            raise IOError()
        return event.get('bytes') or event.get('text')
+
+
# Driver registry: maps capability names to this module's implementations
# so they can be looked up by name.
_async = {
    'asyncio': True,
    'translate_request': translate_request,
    'make_response': make_response,
    'websocket': WebSocket,
}
diff --git a/matteo_env/Lib/site-packages/engineio/async_drivers/eventlet.py b/matteo_env/Lib/site-packages/engineio/async_drivers/eventlet.py
new file mode 100644
index 0000000..9be3797
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/async_drivers/eventlet.py
@@ -0,0 +1,30 @@
+from __future__ import absolute_import
+
+from eventlet.green.threading import Thread, Event
+from eventlet import queue
+from eventlet import sleep
+from eventlet.websocket import WebSocketWSGI as _WebSocketWSGI
+
+
class WebSocketWSGI(_WebSocketWSGI):
    """Thin subclass of eventlet's WebSocketWSGI that additionally captures
    the raw client socket on ``self._sock`` before delegating.
    """

    def __init__(self, *args, **kwargs):
        super(WebSocketWSGI, self).__init__(*args, **kwargs)
        self._sock = None  # raw client socket; filled in by __call__

    def __call__(self, environ, start_response):
        # Only the eventlet WSGI server provides 'eventlet.input'; fail
        # fast with a helpful message under any other server.
        if 'eventlet.input' not in environ:
            raise RuntimeError('You need to use the eventlet server. '
                               'See the Deployment section of the '
                               'documentation for more information.')
        self._sock = environ['eventlet.input'].get_socket()
        return super(WebSocketWSGI, self).__call__(environ, start_response)
+
+
# Driver registry: concurrency primitives and the websocket handler
# backed by eventlet's green threading.
_async = {
    'thread': Thread,
    'queue': queue.Queue,
    'queue_empty': queue.Empty,
    'event': Event,
    'websocket': WebSocketWSGI,
    'sleep': sleep,
}
diff --git a/matteo_env/Lib/site-packages/engineio/async_drivers/gevent.py b/matteo_env/Lib/site-packages/engineio/async_drivers/gevent.py
new file mode 100644
index 0000000..024dd0a
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/async_drivers/gevent.py
@@ -0,0 +1,63 @@
+from __future__ import absolute_import
+
+import gevent
+from gevent import queue
+from gevent.event import Event
+try:
+ import geventwebsocket # noqa
+ _websocket_available = True
+except ImportError:
+ _websocket_available = False
+
+
class Thread(gevent.Greenlet):  # pragma: no cover
    """
    This wrapper class provides gevent Greenlet interface that is compatible
    with the standard library's Thread class.
    """
    def __init__(self, target, args=None, kwargs=None):
        # Fix: avoid mutable default arguments ([] / {}); a shared default
        # object would be reused across every Thread instance. ``None``
        # sentinels are expanded to fresh empty containers per call.
        super(Thread, self).__init__(target, *(args or ()), **(kwargs or {}))

    def _run(self):
        return self.run()
+
+
class WebSocketWSGI(object):  # pragma: no cover
    """
    This wrapper class provides a gevent WebSocket interface that is
    compatible with eventlet's implementation.
    """

    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        # Only gevent-websocket populates 'wsgi.websocket'; fail fast
        # with a helpful message under any other server.
        if 'wsgi.websocket' not in environ:
            raise RuntimeError('You need to use the gevent-websocket server. '
                               'See the Deployment section of the '
                               'documentation for more information.')
        sock = environ['wsgi.websocket']
        self._sock = sock
        self.environ = environ
        # Mirror the underlying socket's handshake attributes.
        self.version = sock.version
        self.path = sock.path
        self.origin = sock.origin
        self.protocol = sock.protocol
        return self.app(self)

    def close(self):
        return self._sock.close()

    def send(self, message):
        return self._sock.send(message)

    def wait(self):
        return self._sock.receive()
+
+
# Driver registry: concurrency primitives backed by gevent. The websocket
# entry is only available when the optional geventwebsocket package
# imported successfully above.
_async = {
    'thread': Thread,
    'queue': queue.JoinableQueue,
    'queue_empty': queue.Empty,
    'event': Event,
    'websocket': WebSocketWSGI if _websocket_available else None,
    'sleep': gevent.sleep,
}
diff --git a/matteo_env/Lib/site-packages/engineio/async_drivers/gevent_uwsgi.py b/matteo_env/Lib/site-packages/engineio/async_drivers/gevent_uwsgi.py
new file mode 100644
index 0000000..bdee812
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/async_drivers/gevent_uwsgi.py
@@ -0,0 +1,154 @@
+from __future__ import absolute_import
+
+import gevent
+from gevent import queue
+from gevent.event import Event
+import uwsgi
+_websocket_available = hasattr(uwsgi, 'websocket_handshake')
+
+
+class Thread(gevent.Greenlet): # pragma: no cover
+ """
+ This wrapper class provides gevent Greenlet interface that is compatible
+ with the standard library's Thread class.
+ """
+ def __init__(self, target, args=[], kwargs={}):
+ super(Thread, self).__init__(target, *args, **kwargs)
+
+ def _run(self):
+ return self.run()
+
+
+class uWSGIWebSocket(object): # pragma: no cover
+ """
+ This wrapper class provides a uWSGI WebSocket interface that is
+ compatible with eventlet's implementation.
+ """
+ def __init__(self, app):
+ self.app = app
+ self._sock = None
+
+ def __call__(self, environ, start_response):
+ self._sock = uwsgi.connection_fd()
+ self.environ = environ
+
+ uwsgi.websocket_handshake()
+
+ self._req_ctx = None
+ if hasattr(uwsgi, 'request_context'):
+ # uWSGI >= 2.1.x with support for api access across-greenlets
+ self._req_ctx = uwsgi.request_context()
+ else:
+ # use event and queue for sending messages
+ from gevent.event import Event
+ from gevent.queue import Queue
+ from gevent.select import select
+ self._event = Event()
+ self._send_queue = Queue()
+
+ # spawn a select greenlet
+ def select_greenlet_runner(fd, event):
+ """Sets event when data becomes available to read on fd."""
+ while True:
+ event.set()
+ try:
+ select([fd], [], [])[0]
+ except ValueError:
+ break
+ self._select_greenlet = gevent.spawn(
+ select_greenlet_runner,
+ self._sock,
+ self._event)
+
+ self.app(self)
+
+ def close(self):
+ """Disconnects uWSGI from the client."""
+ uwsgi.disconnect()
+ if self._req_ctx is None:
+ # better kill it here in case wait() is not called again
+ self._select_greenlet.kill()
+ self._event.set()
+
+ def _send(self, msg):
+ """Transmits message either in binary or UTF-8 text mode,
+ depending on its type."""
+ if isinstance(msg, bytes):
+ method = uwsgi.websocket_send_binary
+ else:
+ method = uwsgi.websocket_send
+ if self._req_ctx is not None:
+ method(msg, request_context=self._req_ctx)
+ else:
+ method(msg)
+
+ def _decode_received(self, msg):
+ """Returns either bytes or str, depending on message type."""
+ if not isinstance(msg, bytes):
+ # already decoded - do nothing
+ return msg
+ # only decode from utf-8 if message is not binary data
+ type = ord(msg[0:1])
+ if type >= 48: # no binary
+ return msg.decode('utf-8')
+ # binary message, don't try to decode
+ return msg
+
+ def send(self, msg):
+ """Queues a message for sending. Real transmission is done in
+ wait method.
+ Sends directly if uWSGI version is new enough."""
+ if self._req_ctx is not None:
+ self._send(msg)
+ else:
+ self._send_queue.put(msg)
+ self._event.set()
+
+ def wait(self):
+ """Waits and returns received messages.
+ If running in compatibility mode for older uWSGI versions,
+ it also sends messages that have been queued by send().
+ A return value of None means that connection was closed.
+ This must be called repeatedly. For uWSGI < 2.1.x it must
+ be called from the main greenlet."""
+ while True:
+ if self._req_ctx is not None:
+ try:
+ msg = uwsgi.websocket_recv(request_context=self._req_ctx)
+ except IOError: # connection closed
+ return None
+ return self._decode_received(msg)
+ else:
+ # we wake up at least every 3 seconds to let uWSGI
+ # do its ping/ponging
+ event_set = self._event.wait(timeout=3)
+ if event_set:
+ self._event.clear()
+ # maybe there is something to send
+ msgs = []
+ while True:
+ try:
+ msgs.append(self._send_queue.get(block=False))
+ except gevent.queue.Empty:
+ break
+ for msg in msgs:
+ self._send(msg)
+ # maybe there is something to receive, if not, at least
+ # ensure uWSGI does its ping/ponging
+ try:
+ msg = uwsgi.websocket_recv_nb()
+ except IOError: # connection closed
+ self._select_greenlet.kill()
+ return None
+ if msg: # message available
+ return self._decode_received(msg)
+
+
+_async = {
+ 'thread': Thread,
+ 'queue': queue.JoinableQueue,
+ 'queue_empty': queue.Empty,
+ 'event': Event,
+ 'websocket': uWSGIWebSocket if _websocket_available else None,
+ 'sleep': gevent.sleep,
+}
diff --git a/matteo_env/Lib/site-packages/engineio/async_drivers/sanic.py b/matteo_env/Lib/site-packages/engineio/async_drivers/sanic.py
new file mode 100644
index 0000000..2400224
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/async_drivers/sanic.py
@@ -0,0 +1,143 @@
+import sys
+from urllib.parse import urlsplit
+
+try: # pragma: no cover
+ from sanic.response import HTTPResponse
+ from sanic.websocket import WebSocketProtocol
+except ImportError:
+ HTTPResponse = None
+ WebSocketProtocol = None
+
+
+def create_route(app, engineio_server, engineio_endpoint): # pragma: no cover
+ """This function sets up the engine.io endpoint as a route for the
+ application.
+
+ Note that both GET and POST requests must be hooked up on the engine.io
+ endpoint.
+ """
+ app.add_route(engineio_server.handle_request, engineio_endpoint,
+ methods=['GET', 'POST', 'OPTIONS'])
+ try:
+ app.enable_websocket()
+ except AttributeError:
+ # ignore, this version does not support websocket
+ pass
+
+
+def translate_request(request): # pragma: no cover
+ """This function takes the arguments passed to the request handler and
+ uses them to generate a WSGI compatible environ dictionary.
+ """
+ class AwaitablePayload(object):
+ def __init__(self, payload):
+ self.payload = payload or b''
+
+ async def read(self, length=None):
+ if length is None:
+ r = self.payload
+ self.payload = b''
+ else:
+ r = self.payload[:length]
+ self.payload = self.payload[length:]
+ return r
+
+ uri_parts = urlsplit(request.url)
+ environ = {
+ 'wsgi.input': AwaitablePayload(request.body),
+ 'wsgi.errors': sys.stderr,
+ 'wsgi.version': (1, 0),
+ 'wsgi.async': True,
+ 'wsgi.multithread': False,
+ 'wsgi.multiprocess': False,
+ 'wsgi.run_once': False,
+ 'SERVER_SOFTWARE': 'sanic',
+ 'REQUEST_METHOD': request.method,
+ 'QUERY_STRING': uri_parts.query or '',
+ 'RAW_URI': request.url,
+ 'SERVER_PROTOCOL': 'HTTP/' + request.version,
+ 'REMOTE_ADDR': '127.0.0.1',
+ 'REMOTE_PORT': '0',
+ 'SERVER_NAME': 'sanic',
+ 'SERVER_PORT': '0',
+ 'sanic.request': request
+ }
+
+ for hdr_name, hdr_value in request.headers.items():
+ hdr_name = hdr_name.upper()
+ if hdr_name == 'CONTENT-TYPE':
+ environ['CONTENT_TYPE'] = hdr_value
+ continue
+ elif hdr_name == 'CONTENT-LENGTH':
+ environ['CONTENT_LENGTH'] = hdr_value
+ continue
+
+ key = 'HTTP_%s' % hdr_name.replace('-', '_')
+ if key in environ:
+ hdr_value = '%s,%s' % (environ[key], hdr_value)
+
+ environ[key] = hdr_value
+
+ environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
+
+ path_info = uri_parts.path
+
+ environ['PATH_INFO'] = path_info
+ environ['SCRIPT_NAME'] = ''
+
+ return environ
+
+
+def make_response(status, headers, payload, environ): # pragma: no cover
+ """This function generates an appropriate response object for this async
+ mode.
+ """
+ headers_dict = {}
+ content_type = None
+ for h in headers:
+ if h[0].lower() == 'content-type':
+ content_type = h[1]
+ else:
+ headers_dict[h[0]] = h[1]
+ return HTTPResponse(body_bytes=payload, content_type=content_type,
+ status=int(status.split()[0]), headers=headers_dict)
+
+
+class WebSocket(object): # pragma: no cover
+ """
+ This wrapper class provides a sanic WebSocket interface that is
+ somewhat compatible with eventlet's implementation.
+ """
+ def __init__(self, handler):
+ self.handler = handler
+ self._sock = None
+
+ async def __call__(self, environ):
+ request = environ['sanic.request']
+ protocol = request.transport.get_protocol()
+ self._sock = await protocol.websocket_handshake(request)
+
+ self.environ = environ
+ await self.handler(self)
+
+ async def close(self):
+ await self._sock.close()
+
+ async def send(self, message):
+ await self._sock.send(message)
+
+ async def wait(self):
+ data = await self._sock.recv()
+ if not isinstance(data, bytes) and \
+ not isinstance(data, str):
+ raise IOError()
+ return data
+
+
+_async = {
+ 'asyncio': True,
+ 'create_route': create_route,
+ 'translate_request': translate_request,
+ 'make_response': make_response,
+ 'websocket': WebSocket if WebSocketProtocol else None,
+}
diff --git a/matteo_env/Lib/site-packages/engineio/async_drivers/threading.py b/matteo_env/Lib/site-packages/engineio/async_drivers/threading.py
new file mode 100644
index 0000000..9b53756
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/async_drivers/threading.py
@@ -0,0 +1,17 @@
+from __future__ import absolute_import
+import threading
+import time
+
+try:
+ import queue
+except ImportError: # pragma: no cover
+ import Queue as queue
+
+_async = {
+ 'thread': threading.Thread,
+ 'queue': queue.Queue,
+ 'queue_empty': queue.Empty,
+ 'event': threading.Event,
+ 'websocket': None,
+ 'sleep': time.sleep,
+}
diff --git a/matteo_env/Lib/site-packages/engineio/async_drivers/tornado.py b/matteo_env/Lib/site-packages/engineio/async_drivers/tornado.py
new file mode 100644
index 0000000..eb1c4de
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/async_drivers/tornado.py
@@ -0,0 +1,182 @@
+import asyncio
+import sys
+from urllib.parse import urlsplit
+from .. import exceptions
+
+import tornado.web
+import tornado.websocket
+
+
+def get_tornado_handler(engineio_server):
+ class Handler(tornado.websocket.WebSocketHandler): # pragma: no cover
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ if isinstance(engineio_server.cors_allowed_origins, str):
+ if engineio_server.cors_allowed_origins == '*':
+ self.allowed_origins = None
+ else:
+ self.allowed_origins = [
+ engineio_server.cors_allowed_origins]
+ else:
+ self.allowed_origins = engineio_server.cors_allowed_origins
+ self.receive_queue = asyncio.Queue()
+
+ async def get(self, *args, **kwargs):
+ if self.request.headers.get('Upgrade', '').lower() == 'websocket':
+ ret = super().get(*args, **kwargs)
+ if asyncio.iscoroutine(ret):
+ await ret
+ else:
+ await engineio_server.handle_request(self)
+
+ async def open(self, *args, **kwargs):
+ # this is the handler for the websocket request
+ asyncio.ensure_future(engineio_server.handle_request(self))
+
+ async def post(self, *args, **kwargs):
+ await engineio_server.handle_request(self)
+
+ async def options(self, *args, **kwargs):
+ await engineio_server.handle_request(self)
+
+ async def on_message(self, message):
+ await self.receive_queue.put(message)
+
+ async def get_next_message(self):
+ return await self.receive_queue.get()
+
+ def on_close(self):
+ self.receive_queue.put_nowait(None)
+
+ def check_origin(self, origin):
+ if self.allowed_origins is None or origin in self.allowed_origins:
+ return True
+ return super().check_origin(origin)
+
+ def get_compression_options(self):
+ # enable compression
+ return {}
+
+ return Handler
+
+
+def translate_request(handler):
+ """This function takes the arguments passed to the request handler and
+ uses them to generate a WSGI compatible environ dictionary.
+ """
+ class AwaitablePayload(object):
+ def __init__(self, payload):
+ self.payload = payload or b''
+
+ async def read(self, length=None):
+ if length is None:
+ r = self.payload
+ self.payload = b''
+ else:
+ r = self.payload[:length]
+ self.payload = self.payload[length:]
+ return r
+
+ payload = handler.request.body
+
+ uri_parts = urlsplit(handler.request.path)
+ full_uri = handler.request.path
+ if handler.request.query: # pragma: no cover
+ full_uri += '?' + handler.request.query
+ environ = {
+ 'wsgi.input': AwaitablePayload(payload),
+ 'wsgi.errors': sys.stderr,
+ 'wsgi.version': (1, 0),
+ 'wsgi.async': True,
+ 'wsgi.multithread': False,
+ 'wsgi.multiprocess': False,
+ 'wsgi.run_once': False,
+ 'SERVER_SOFTWARE': 'aiohttp',
+ 'REQUEST_METHOD': handler.request.method,
+ 'QUERY_STRING': handler.request.query or '',
+ 'RAW_URI': full_uri,
+ 'SERVER_PROTOCOL': 'HTTP/%s' % handler.request.version,
+ 'REMOTE_ADDR': '127.0.0.1',
+ 'REMOTE_PORT': '0',
+ 'SERVER_NAME': 'aiohttp',
+ 'SERVER_PORT': '0',
+ 'tornado.handler': handler
+ }
+
+ for hdr_name, hdr_value in handler.request.headers.items():
+ hdr_name = hdr_name.upper()
+ if hdr_name == 'CONTENT-TYPE':
+ environ['CONTENT_TYPE'] = hdr_value
+ continue
+ elif hdr_name == 'CONTENT-LENGTH':
+ environ['CONTENT_LENGTH'] = hdr_value
+ continue
+
+ key = 'HTTP_%s' % hdr_name.replace('-', '_')
+ environ[key] = hdr_value
+
+ environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
+
+ path_info = uri_parts.path
+
+ environ['PATH_INFO'] = path_info
+ environ['SCRIPT_NAME'] = ''
+
+ return environ
+
+
+def make_response(status, headers, payload, environ):
+ """This function generates an appropriate response object for this async
+ mode.
+ """
+ tornado_handler = environ['tornado.handler']
+ try:
+ tornado_handler.set_status(int(status.split()[0]))
+ except RuntimeError: # pragma: no cover
+ # for websocket connections Tornado does not accept a response, since
+ # it already emitted the 101 status code
+ return
+ for header, value in headers:
+ tornado_handler.set_header(header, value)
+ tornado_handler.write(payload)
+ tornado_handler.finish()
+
+
+class WebSocket(object): # pragma: no cover
+ """
+ This wrapper class provides a tornado WebSocket interface that is
+ somewhat compatible with eventlet's implementation.
+ """
+ def __init__(self, handler):
+ self.handler = handler
+ self.tornado_handler = None
+
+ async def __call__(self, environ):
+ self.tornado_handler = environ['tornado.handler']
+ self.environ = environ
+ await self.handler(self)
+
+ async def close(self):
+ self.tornado_handler.close()
+
+ async def send(self, message):
+ try:
+ self.tornado_handler.write_message(
+ message, binary=isinstance(message, bytes))
+ except tornado.websocket.WebSocketClosedError:
+ raise exceptions.EngineIOError()
+
+ async def wait(self):
+ msg = await self.tornado_handler.get_next_message()
+ if not isinstance(msg, bytes) and \
+ not isinstance(msg, str):
+ raise IOError()
+ return msg
+
+
+_async = {
+ 'asyncio': True,
+ 'translate_request': translate_request,
+ 'make_response': make_response,
+ 'websocket': WebSocket,
+}
diff --git a/matteo_env/Lib/site-packages/engineio/asyncio_client.py b/matteo_env/Lib/site-packages/engineio/asyncio_client.py
new file mode 100644
index 0000000..57691d6
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/asyncio_client.py
@@ -0,0 +1,608 @@
+import asyncio
+import signal
+import ssl
+import threading
+
+try:
+ import aiohttp
+except ImportError: # pragma: no cover
+ aiohttp = None
+
+from . import client
+from . import exceptions
+from . import packet
+from . import payload
+
+async_signal_handler_set = False
+
+
+def async_signal_handler():
+ """SIGINT handler.
+
+ Disconnect all active async clients.
+ """
+ async def _handler():
+ asyncio.get_event_loop().stop()
+ for c in client.connected_clients[:]:
+ if c.is_asyncio_based():
+ await c.disconnect()
+ else: # pragma: no cover
+ pass
+
+ asyncio.ensure_future(_handler())
+
+
+class AsyncClient(client.Client):
+ """An Engine.IO client for asyncio.
+
+ This class implements a fully compliant Engine.IO web client with support
+ for websocket and long-polling transports, compatible with the asyncio
+ framework on Python 3.5 or newer.
+
+ :param logger: To enable logging set to ``True`` or pass a logger object to
+ use. To disable logging set to ``False``. The default is
+ ``False``. Note that fatal errors are logged even when
+ ``logger`` is ``False``.
+ :param json: An alternative json module to use for encoding and decoding
+ packets. Custom json modules must have ``dumps`` and ``loads``
+ functions that are compatible with the standard library
+ versions.
+ :param request_timeout: A timeout in seconds for requests. The default is
+ 5 seconds.
+ :param http_session: an initialized ``aiohttp.ClientSession`` object to be
+ used when sending requests to the server. Use it if
+ you need to add special client options such as proxy
+ servers, SSL certificates, etc.
+ :param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to
+ skip SSL certificate verification, allowing
+ connections to servers with self signed certificates.
+ The default is ``True``.
+ """
+ def is_asyncio_based(self):
+ return True
+
+ async def connect(self, url, headers=None, transports=None,
+ engineio_path='engine.io'):
+ """Connect to an Engine.IO server.
+
+ :param url: The URL of the Engine.IO server. It can include custom
+ query string parameters if required by the server.
+ :param headers: A dictionary with custom headers to send with the
+ connection request.
+ :param transports: The list of allowed transports. Valid transports
+ are ``'polling'`` and ``'websocket'``. If not
+ given, the polling transport is connected first,
+ then an upgrade to websocket is attempted.
+ :param engineio_path: The endpoint where the Engine.IO server is
+ installed. The default value is appropriate for
+ most cases.
+
+ Note: this method is a coroutine.
+
+ Example usage::
+
+ eio = engineio.Client()
+ await eio.connect('http://localhost:5000')
+ """
+ global async_signal_handler_set
+ if not async_signal_handler_set and \
+ threading.current_thread() == threading.main_thread():
+
+ try:
+ asyncio.get_event_loop().add_signal_handler(
+ signal.SIGINT, async_signal_handler)
+ async_signal_handler_set = True
+ except NotImplementedError: # pragma: no cover
+ self.logger.warning('Signal handler is unsupported')
+
+ if self.state != 'disconnected':
+ raise ValueError('Client is not in a disconnected state')
+ valid_transports = ['polling', 'websocket']
+ if transports is not None:
+ if isinstance(transports, str):
+ transports = [transports]
+ transports = [transport for transport in transports
+ if transport in valid_transports]
+ if not transports:
+ raise ValueError('No valid transports provided')
+ self.transports = transports or valid_transports
+ self.queue = self.create_queue()
+ return await getattr(self, '_connect_' + self.transports[0])(
+ url, headers or {}, engineio_path)
+
+ async def wait(self):
+ """Wait until the connection with the server ends.
+
+ Client applications can use this function to block the main thread
+ during the life of the connection.
+
+ Note: this method is a coroutine.
+ """
+ if self.read_loop_task:
+ await self.read_loop_task
+
+ async def send(self, data):
+ """Send a message to a client.
+
+ :param data: The data to send to the client. Data can be of type
+ ``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
+ or ``dict``, the data will be serialized as JSON.
+
+ Note: this method is a coroutine.
+ """
+ await self._send_packet(packet.Packet(packet.MESSAGE, data=data))
+
+ async def disconnect(self, abort=False):
+ """Disconnect from the server.
+
+ :param abort: If set to ``True``, do not wait for background tasks
+ associated with the connection to end.
+
+ Note: this method is a coroutine.
+ """
+ if self.state == 'connected':
+ await self._send_packet(packet.Packet(packet.CLOSE))
+ await self.queue.put(None)
+ self.state = 'disconnecting'
+ await self._trigger_event('disconnect', run_async=False)
+ if self.current_transport == 'websocket':
+ await self.ws.close()
+ if not abort:
+ await self.read_loop_task
+ self.state = 'disconnected'
+ try:
+ client.connected_clients.remove(self)
+ except ValueError: # pragma: no cover
+ pass
+ self._reset()
+
+ def start_background_task(self, target, *args, **kwargs):
+ """Start a background task.
+
+ This is a utility function that applications can use to start a
+ background task.
+
+ :param target: the target function to execute.
+ :param args: arguments to pass to the function.
+ :param kwargs: keyword arguments to pass to the function.
+
+ This function returns an object compatible with the `Thread` class in
+ the Python standard library. The `start()` method on this object is
+ already called by this function.
+
+ Note: this method is a coroutine.
+ """
+ return asyncio.ensure_future(target(*args, **kwargs))
+
+ async def sleep(self, seconds=0):
+ """Sleep for the requested amount of time.
+
+ Note: this method is a coroutine.
+ """
+ return await asyncio.sleep(seconds)
+
+ def create_queue(self):
+ """Create a queue object."""
+ q = asyncio.Queue()
+ q.Empty = asyncio.QueueEmpty
+ return q
+
+ def create_event(self):
+ """Create an event object."""
+ return asyncio.Event()
+
+ def _reset(self):
+ if self.http: # pragma: no cover
+ asyncio.ensure_future(self.http.close())
+ super()._reset()
+
+ async def _connect_polling(self, url, headers, engineio_path):
+ """Establish a long-polling connection to the Engine.IO server."""
+ if aiohttp is None: # pragma: no cover
+ self.logger.error('aiohttp not installed -- cannot make HTTP '
+ 'requests!')
+ return
+ self.base_url = self._get_engineio_url(url, engineio_path, 'polling')
+ self.logger.info('Attempting polling connection to ' + self.base_url)
+ r = await self._send_request(
+ 'GET', self.base_url + self._get_url_timestamp(), headers=headers,
+ timeout=self.request_timeout)
+ if r is None:
+ self._reset()
+ raise exceptions.ConnectionError(
+ 'Connection refused by the server')
+ if r.status < 200 or r.status >= 300:
+ self._reset()
+ try:
+ arg = await r.json()
+ except aiohttp.ClientError:
+ arg = None
+ raise exceptions.ConnectionError(
+ 'Unexpected status code {} in server response'.format(
+ r.status), arg)
+ try:
+ p = payload.Payload(encoded_payload=(await r.read()).decode(
+ 'utf-8'))
+ except ValueError:
+ raise exceptions.ConnectionError(
+ 'Unexpected response from server') from None
+ open_packet = p.packets[0]
+ if open_packet.packet_type != packet.OPEN:
+ raise exceptions.ConnectionError(
+ 'OPEN packet not returned by server')
+ self.logger.info(
+ 'Polling connection accepted with ' + str(open_packet.data))
+ self.sid = open_packet.data['sid']
+ self.upgrades = open_packet.data['upgrades']
+ self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
+ self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
+ self.current_transport = 'polling'
+ self.base_url += '&sid=' + self.sid
+
+ self.state = 'connected'
+ client.connected_clients.append(self)
+ await self._trigger_event('connect', run_async=False)
+
+ for pkt in p.packets[1:]:
+ await self._receive_packet(pkt)
+
+ if 'websocket' in self.upgrades and 'websocket' in self.transports:
+ # attempt to upgrade to websocket
+ if await self._connect_websocket(url, headers, engineio_path):
+ # upgrade to websocket succeeded, we're done here
+ return
+
+ self.write_loop_task = self.start_background_task(self._write_loop)
+ self.read_loop_task = self.start_background_task(
+ self._read_loop_polling)
+
+ async def _connect_websocket(self, url, headers, engineio_path):
+ """Establish or upgrade to a WebSocket connection with the server."""
+ if aiohttp is None: # pragma: no cover
+ self.logger.error('aiohttp package not installed')
+ return False
+ websocket_url = self._get_engineio_url(url, engineio_path,
+ 'websocket')
+ if self.sid:
+ self.logger.info(
+ 'Attempting WebSocket upgrade to ' + websocket_url)
+ upgrade = True
+ websocket_url += '&sid=' + self.sid
+ else:
+ upgrade = False
+ self.base_url = websocket_url
+ self.logger.info(
+ 'Attempting WebSocket connection to ' + websocket_url)
+
+ if self.http is None or self.http.closed: # pragma: no cover
+ self.http = aiohttp.ClientSession()
+
+ # extract any new cookies passed in a header so that they can also be
+        # sent to the WebSocket route
+ cookies = {}
+ for header, value in headers.items():
+ if header.lower() == 'cookie':
+ cookies = dict(
+ [cookie.split('=', 1) for cookie in value.split('; ')])
+ del headers[header]
+ break
+ self.http.cookie_jar.update_cookies(cookies)
+
+ try:
+ if not self.ssl_verify:
+ ssl_context = ssl.create_default_context()
+ ssl_context.check_hostname = False
+ ssl_context.verify_mode = ssl.CERT_NONE
+ ws = await self.http.ws_connect(
+ websocket_url + self._get_url_timestamp(),
+ headers=headers, ssl=ssl_context)
+ else:
+ ws = await self.http.ws_connect(
+ websocket_url + self._get_url_timestamp(),
+ headers=headers)
+ except (aiohttp.client_exceptions.WSServerHandshakeError,
+ aiohttp.client_exceptions.ServerConnectionError,
+ aiohttp.client_exceptions.ClientConnectionError):
+ if upgrade:
+ self.logger.warning(
+ 'WebSocket upgrade failed: connection error')
+ return False
+ else:
+ raise exceptions.ConnectionError('Connection error')
+ if upgrade:
+ p = packet.Packet(packet.PING, data='probe').encode()
+ try:
+ await ws.send_str(p)
+ except Exception as e: # pragma: no cover
+ self.logger.warning(
+ 'WebSocket upgrade failed: unexpected send exception: %s',
+ str(e))
+ return False
+ try:
+ p = (await ws.receive()).data
+ except Exception as e: # pragma: no cover
+ self.logger.warning(
+ 'WebSocket upgrade failed: unexpected recv exception: %s',
+ str(e))
+ return False
+ pkt = packet.Packet(encoded_packet=p)
+ if pkt.packet_type != packet.PONG or pkt.data != 'probe':
+ self.logger.warning(
+ 'WebSocket upgrade failed: no PONG packet')
+ return False
+ p = packet.Packet(packet.UPGRADE).encode()
+ try:
+ await ws.send_str(p)
+ except Exception as e: # pragma: no cover
+ self.logger.warning(
+ 'WebSocket upgrade failed: unexpected send exception: %s',
+ str(e))
+ return False
+ self.current_transport = 'websocket'
+ self.logger.info('WebSocket upgrade was successful')
+ else:
+ try:
+ p = (await ws.receive()).data
+ except Exception as e: # pragma: no cover
+ raise exceptions.ConnectionError(
+ 'Unexpected recv exception: ' + str(e))
+ open_packet = packet.Packet(encoded_packet=p)
+ if open_packet.packet_type != packet.OPEN:
+ raise exceptions.ConnectionError('no OPEN packet')
+ self.logger.info(
+ 'WebSocket connection accepted with ' + str(open_packet.data))
+ self.sid = open_packet.data['sid']
+ self.upgrades = open_packet.data['upgrades']
+ self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
+ self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
+ self.current_transport = 'websocket'
+
+ self.state = 'connected'
+ client.connected_clients.append(self)
+ await self._trigger_event('connect', run_async=False)
+
+ self.ws = ws
+ self.write_loop_task = self.start_background_task(self._write_loop)
+ self.read_loop_task = self.start_background_task(
+ self._read_loop_websocket)
+ return True
+
+ async def _receive_packet(self, pkt):
+ """Handle incoming packets from the server."""
+ packet_name = packet.packet_names[pkt.packet_type] \
+ if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
+ self.logger.info(
+ 'Received packet %s data %s', packet_name,
+ pkt.data if not isinstance(pkt.data, bytes) else '')
+ if pkt.packet_type == packet.MESSAGE:
+ await self._trigger_event('message', pkt.data, run_async=True)
+ elif pkt.packet_type == packet.PING:
+ await self._send_packet(packet.Packet(packet.PONG, pkt.data))
+ elif pkt.packet_type == packet.CLOSE:
+ await self.disconnect(abort=True)
+ elif pkt.packet_type == packet.NOOP:
+ pass
+ else:
+ self.logger.error('Received unexpected packet of type %s',
+ pkt.packet_type)
+
+ async def _send_packet(self, pkt):
+ """Queue a packet to be sent to the server."""
+ if self.state != 'connected':
+ return
+ await self.queue.put(pkt)
+ self.logger.info(
+ 'Sending packet %s data %s',
+ packet.packet_names[pkt.packet_type],
+ pkt.data if not isinstance(pkt.data, bytes) else '')
+
+ async def _send_request(
+ self, method, url, headers=None, body=None,
+ timeout=None): # pragma: no cover
+ if self.http is None or self.http.closed:
+ self.http = aiohttp.ClientSession()
+ http_method = getattr(self.http, method.lower())
+
+ try:
+ if not self.ssl_verify:
+ return await http_method(
+ url, headers=headers, data=body,
+ timeout=aiohttp.ClientTimeout(total=timeout), ssl=False)
+ else:
+ return await http_method(
+ url, headers=headers, data=body,
+ timeout=aiohttp.ClientTimeout(total=timeout))
+
+ except (aiohttp.ClientError, asyncio.TimeoutError) as exc:
+ self.logger.info('HTTP %s request to %s failed with error %s.',
+ method, url, exc)
+
+ async def _trigger_event(self, event, *args, **kwargs):
+ """Invoke an event handler."""
+ run_async = kwargs.pop('run_async', False)
+ ret = None
+ if event in self.handlers:
+ if asyncio.iscoroutinefunction(self.handlers[event]) is True:
+ if run_async:
+ return self.start_background_task(self.handlers[event],
+ *args)
+ else:
+ try:
+ ret = await self.handlers[event](*args)
+ except asyncio.CancelledError: # pragma: no cover
+ pass
+ except:
+ self.logger.exception(event + ' async handler error')
+ if event == 'connect':
+ # if connect handler raised error we reject the
+ # connection
+ return False
+ else:
+ if run_async:
+ async def async_handler():
+ return self.handlers[event](*args)
+
+ return self.start_background_task(async_handler)
+ else:
+ try:
+ ret = self.handlers[event](*args)
+ except:
+ self.logger.exception(event + ' handler error')
+ if event == 'connect':
+ # if connect handler raised error we reject the
+ # connection
+ return False
+ return ret
+
+ async def _read_loop_polling(self):
+ """Read packets by polling the Engine.IO server."""
+ while self.state == 'connected':
+ self.logger.info(
+ 'Sending polling GET request to ' + self.base_url)
+ r = await self._send_request(
+ 'GET', self.base_url + self._get_url_timestamp(),
+ timeout=max(self.ping_interval, self.ping_timeout) + 5)
+ if r is None:
+ self.logger.warning(
+ 'Connection refused by the server, aborting')
+ await self.queue.put(None)
+ break
+ if r.status < 200 or r.status >= 300:
+ self.logger.warning('Unexpected status code %s in server '
+ 'response, aborting', r.status)
+ await self.queue.put(None)
+ break
+ try:
+ p = payload.Payload(encoded_payload=(await r.read()).decode(
+ 'utf-8'))
+ except ValueError:
+ self.logger.warning(
+ 'Unexpected packet from server, aborting')
+ await self.queue.put(None)
+ break
+ for pkt in p.packets:
+ await self._receive_packet(pkt)
+
+ self.logger.info('Waiting for write loop task to end')
+ await self.write_loop_task
+ if self.state == 'connected':
+ await self._trigger_event('disconnect', run_async=False)
+ try:
+ client.connected_clients.remove(self)
+ except ValueError: # pragma: no cover
+ pass
+ self._reset()
+ self.logger.info('Exiting read loop task')
+
+ async def _read_loop_websocket(self):
+ """Read packets from the Engine.IO WebSocket connection."""
+ while self.state == 'connected':
+ p = None
+ try:
+ p = await asyncio.wait_for(
+ self.ws.receive(),
+ timeout=self.ping_interval + self.ping_timeout)
+ p = p.data
+ if p is None: # pragma: no cover
+ break # the connection is broken
+ except asyncio.TimeoutError:
+ self.logger.warning(
+ 'Server has stopped communicating, aborting')
+ await self.queue.put(None)
+ break
+ except aiohttp.client_exceptions.ServerDisconnectedError:
+ self.logger.info(
+ 'Read loop: WebSocket connection was closed, aborting')
+ await self.queue.put(None)
+ break
+ except Exception as e:
+ self.logger.info(
+ 'Unexpected error receiving packet: "%s", aborting',
+ str(e))
+ await self.queue.put(None)
+ break
+ try:
+ pkt = packet.Packet(encoded_packet=p)
+ except Exception as e: # pragma: no cover
+ self.logger.info(
+ 'Unexpected error decoding packet: "%s", aborting', str(e))
+ await self.queue.put(None)
+ break
+ await self._receive_packet(pkt)
+
+ self.logger.info('Waiting for write loop task to end')
+ await self.write_loop_task
+ if self.state == 'connected':
+ await self._trigger_event('disconnect', run_async=False)
+ try:
+ client.connected_clients.remove(self)
+ except ValueError: # pragma: no cover
+ pass
+ self._reset()
+ self.logger.info('Exiting read loop task')
+
+ async def _write_loop(self):
+ """This background task sends packages to the server as they are
+ pushed to the send queue.
+ """
+ while self.state == 'connected':
+ # to simplify the timeout handling, use the maximum of the
+ # ping interval and ping timeout as timeout, with an extra 5
+ # seconds grace period
+ timeout = max(self.ping_interval, self.ping_timeout) + 5
+ packets = None
+ try:
+ packets = [await asyncio.wait_for(self.queue.get(), timeout)]
+ except (self.queue.Empty, asyncio.TimeoutError,
+ asyncio.CancelledError):
+ self.logger.error('packet queue is empty, aborting')
+ break
+ if packets == [None]:
+ self.queue.task_done()
+ packets = []
+ else:
+ while True:
+ try:
+ packets.append(self.queue.get_nowait())
+ except self.queue.Empty:
+ break
+ if packets[-1] is None:
+ packets = packets[:-1]
+ self.queue.task_done()
+ break
+ if not packets:
+ # empty packet list returned -> connection closed
+ break
+ if self.current_transport == 'polling':
+ p = payload.Payload(packets=packets)
+ r = await self._send_request(
+ 'POST', self.base_url, body=p.encode(),
+ headers={'Content-Type': 'application/octet-stream'},
+ timeout=self.request_timeout)
+ for pkt in packets:
+ self.queue.task_done()
+ if r is None:
+ self.logger.warning(
+ 'Connection refused by the server, aborting')
+ break
+ if r.status < 200 or r.status >= 300:
+ self.logger.warning('Unexpected status code %s in server '
+ 'response, aborting', r.status)
+ self._reset()
+ break
+ else:
+ # websocket
+ try:
+ for pkt in packets:
+ if pkt.binary:
+ await self.ws.send_bytes(pkt.encode())
+ else:
+ await self.ws.send_str(pkt.encode())
+ self.queue.task_done()
+ except (aiohttp.client_exceptions.ServerDisconnectedError,
+ BrokenPipeError, OSError):
+ self.logger.info(
+ 'Write loop: WebSocket connection was closed, '
+ 'aborting')
+ break
+ self.logger.info('Exiting write loop task')
diff --git a/matteo_env/Lib/site-packages/engineio/asyncio_server.py b/matteo_env/Lib/site-packages/engineio/asyncio_server.py
new file mode 100644
index 0000000..6639f26
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/asyncio_server.py
@@ -0,0 +1,505 @@
+import asyncio
+import urllib
+
+from . import exceptions
+from . import packet
+from . import server
+from . import asyncio_socket
+
+
+class AsyncServer(server.Server):
+ """An Engine.IO server for asyncio.
+
+ This class implements a fully compliant Engine.IO web server with support
+ for websocket and long-polling transports, compatible with the asyncio
+ framework on Python 3.5 or newer.
+
+ :param async_mode: The asynchronous model to use. See the Deployment
+ section in the documentation for a description of the
+ available options. Valid async modes are "aiohttp",
+ "sanic", "tornado" and "asgi". If this argument is not
+ given, "aiohttp" is tried first, followed by "sanic",
+ "tornado", and finally "asgi". The first async mode that
+ has all its dependencies installed is the one that is
+ chosen.
+ :param ping_interval: The interval in seconds at which the server pings
+ the client. The default is 25 seconds. For advanced
+ control, a two element tuple can be given, where
+ the first number is the ping interval and the second
+ is a grace period added by the server.
+ :param ping_timeout: The time in seconds that the client waits for the
+ server to respond before disconnecting. The default
+ is 5 seconds.
+ :param max_http_buffer_size: The maximum size of a message when using the
+ polling transport. The default is 1,000,000
+ bytes.
+ :param allow_upgrades: Whether to allow transport upgrades or not.
+ :param http_compression: Whether to compress packets when using the
+ polling transport.
+ :param compression_threshold: Only compress messages when their byte size
+ is greater than this value.
+ :param cookie: If set to a string, it is the name of the HTTP cookie the
+ server sends back to the client containing the client
+ session id. If set to a dictionary, the ``'name'`` key
+ contains the cookie name and other keys define cookie
+ attributes, where the value of each attribute can be a
+ string, a callable with no arguments, or a boolean. If set
+ to ``None`` (the default), a cookie is not sent to the
+ client.
+ :param cors_allowed_origins: Origin or list of origins that are allowed to
+ connect to this server. Only the same origin
+ is allowed by default. Set this argument to
+ ``'*'`` to allow all origins, or to ``[]`` to
+ disable CORS handling.
+ :param cors_credentials: Whether credentials (cookies, authentication) are
+ allowed in requests to this server.
+ :param logger: To enable logging set to ``True`` or pass a logger object to
+ use. To disable logging set to ``False``. Note that fatal
+ errors are logged even when ``logger`` is ``False``.
+ :param json: An alternative json module to use for encoding and decoding
+ packets. Custom json modules must have ``dumps`` and ``loads``
+ functions that are compatible with the standard library
+ versions.
+ :param async_handlers: If set to ``True``, run message event handlers in
+ non-blocking threads. To run handlers synchronously,
+ set to ``False``. The default is ``True``.
+ :param kwargs: Reserved for future extensions, any additional parameters
+ given as keyword arguments will be silently ignored.
+ """
+ def is_asyncio_based(self):
+ return True
+
+ def async_modes(self):
+ return ['aiohttp', 'sanic', 'tornado', 'asgi']
+
+ def attach(self, app, engineio_path='engine.io'):
+ """Attach the Engine.IO server to an application."""
+ engineio_path = engineio_path.strip('/')
+ self._async['create_route'](app, self, '/{}/'.format(engineio_path))
+
+ async def send(self, sid, data):
+ """Send a message to a client.
+
+ :param sid: The session id of the recipient client.
+ :param data: The data to send to the client. Data can be of type
+ ``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
+ or ``dict``, the data will be serialized as JSON.
+
+ Note: this method is a coroutine.
+ """
+ try:
+ socket = self._get_socket(sid)
+ except KeyError:
+ # the socket is not available
+ self.logger.warning('Cannot send to sid %s', sid)
+ return
+ await socket.send(packet.Packet(packet.MESSAGE, data=data))
+
+ async def get_session(self, sid):
+ """Return the user session for a client.
+
+ :param sid: The session id of the client.
+
+ The return value is a dictionary. Modifications made to this
+ dictionary are not guaranteed to be preserved. If you want to modify
+ the user session, use the ``session`` context manager instead.
+ """
+ socket = self._get_socket(sid)
+ return socket.session
+
+ async def save_session(self, sid, session):
+ """Store the user session for a client.
+
+ :param sid: The session id of the client.
+ :param session: The session dictionary.
+ """
+ socket = self._get_socket(sid)
+ socket.session = session
+
+ def session(self, sid):
+ """Return the user session for a client with context manager syntax.
+
+ :param sid: The session id of the client.
+
+ This is a context manager that returns the user session dictionary for
+ the client. Any changes that are made to this dictionary inside the
+ context manager block are saved back to the session. Example usage::
+
+ @eio.on('connect')
+ def on_connect(sid, environ):
+ username = authenticate_user(environ)
+ if not username:
+ return False
+ with eio.session(sid) as session:
+ session['username'] = username
+
+ @eio.on('message')
+ def on_message(sid, msg):
+ async with eio.session(sid) as session:
+ print('received message from ', session['username'])
+ """
+ class _session_context_manager(object):
+ def __init__(self, server, sid):
+ self.server = server
+ self.sid = sid
+ self.session = None
+
+ async def __aenter__(self):
+ self.session = await self.server.get_session(sid)
+ return self.session
+
+ async def __aexit__(self, *args):
+ await self.server.save_session(sid, self.session)
+
+ return _session_context_manager(self, sid)
+
+ async def disconnect(self, sid=None):
+ """Disconnect a client.
+
+ :param sid: The session id of the client to close. If this parameter
+ is not given, then all clients are closed.
+
+ Note: this method is a coroutine.
+ """
+ if sid is not None:
+ try:
+ socket = self._get_socket(sid)
+ except KeyError: # pragma: no cover
+ # the socket was already closed or gone
+ pass
+ else:
+ await socket.close()
+ if sid in self.sockets: # pragma: no cover
+ del self.sockets[sid]
+ else:
+ await asyncio.wait([client.close()
+ for client in self.sockets.values()])
+ self.sockets = {}
+
+ async def handle_request(self, *args, **kwargs):
+ """Handle an HTTP request from the client.
+
+ This is the entry point of the Engine.IO application. This function
+ returns the HTTP response to deliver to the client.
+
+ Note: this method is a coroutine.
+ """
+ translate_request = self._async['translate_request']
+ if asyncio.iscoroutinefunction(translate_request):
+ environ = await translate_request(*args, **kwargs)
+ else:
+ environ = translate_request(*args, **kwargs)
+
+ if self.cors_allowed_origins != []:
+ # Validate the origin header if present
+ # This is important for WebSocket more than for HTTP, since
+ # browsers only apply CORS controls to HTTP.
+ origin = environ.get('HTTP_ORIGIN')
+ if origin:
+ allowed_origins = self._cors_allowed_origins(environ)
+ if allowed_origins is not None and origin not in \
+ allowed_origins:
+ self._log_error_once(
+ origin + ' is not an accepted origin.', 'bad-origin')
+ return await self._make_response(
+ self._bad_request(
+ origin + ' is not an accepted origin.'),
+ environ)
+
+ method = environ['REQUEST_METHOD']
+ query = urllib.parse.parse_qs(environ.get('QUERY_STRING', ''))
+
+ sid = query['sid'][0] if 'sid' in query else None
+ jsonp = False
+ jsonp_index = None
+
+ # make sure the client speaks a compatible Engine.IO version
+ sid = query['sid'][0] if 'sid' in query else None
+ if sid is None and query.get('EIO') != ['4']:
+ self._log_error_once(
+ 'The client is using an unsupported version of the Socket.IO '
+ 'or Engine.IO protocols', 'bad-version'
+ )
+ return await self._make_response(self._bad_request(
+ 'The client is using an unsupported version of the Socket.IO '
+ 'or Engine.IO protocols'
+ ), environ)
+
+ if 'j' in query:
+ jsonp = True
+ try:
+ jsonp_index = int(query['j'][0])
+ except (ValueError, KeyError, IndexError):
+ # Invalid JSONP index number
+ pass
+
+ if jsonp and jsonp_index is None:
+ self._log_error_once('Invalid JSONP index number',
+ 'bad-jsonp-index')
+ r = self._bad_request('Invalid JSONP index number')
+ elif method == 'GET':
+ if sid is None:
+ transport = query.get('transport', ['polling'])[0]
+ # transport must be one of 'polling' or 'websocket'.
+ # if 'websocket', the HTTP_UPGRADE header must match.
+ upgrade_header = environ.get('HTTP_UPGRADE').lower() \
+ if 'HTTP_UPGRADE' in environ else None
+ if transport == 'polling' \
+ or transport == upgrade_header == 'websocket':
+ r = await self._handle_connect(environ, transport,
+ jsonp_index)
+ else:
+ self._log_error_once('Invalid transport ' + transport,
+ 'bad-transport')
+ r = self._bad_request('Invalid transport ' + transport)
+ else:
+ if sid not in self.sockets:
+ self._log_error_once('Invalid session ' + sid, 'bad-sid')
+ r = self._bad_request('Invalid session ' + sid)
+ else:
+ socket = self._get_socket(sid)
+ try:
+ packets = await socket.handle_get_request(environ)
+ if isinstance(packets, list):
+ r = self._ok(packets, jsonp_index=jsonp_index)
+ else:
+ r = packets
+ except exceptions.EngineIOError:
+ if sid in self.sockets: # pragma: no cover
+ await self.disconnect(sid)
+ r = self._bad_request()
+ if sid in self.sockets and self.sockets[sid].closed:
+ del self.sockets[sid]
+ elif method == 'POST':
+ if sid is None or sid not in self.sockets:
+ self._log_error_once('Invalid session ' + sid, 'bad-sid')
+ r = self._bad_request('Invalid session ' + sid)
+ else:
+ socket = self._get_socket(sid)
+ try:
+ await socket.handle_post_request(environ)
+ r = self._ok(jsonp_index=jsonp_index)
+ except exceptions.EngineIOError:
+ if sid in self.sockets: # pragma: no cover
+ await self.disconnect(sid)
+ r = self._bad_request()
+ except: # pragma: no cover
+ # for any other unexpected errors, we log the error
+ # and keep going
+ self.logger.exception('post request handler error')
+ r = self._ok(jsonp_index=jsonp_index)
+ elif method == 'OPTIONS':
+ r = self._ok()
+ else:
+ self.logger.warning('Method %s not supported', method)
+ r = self._method_not_found()
+ if not isinstance(r, dict):
+ return r
+ if self.http_compression and \
+ len(r['response']) >= self.compression_threshold:
+ encodings = [e.split(';')[0].strip() for e in
+ environ.get('HTTP_ACCEPT_ENCODING', '').split(',')]
+ for encoding in encodings:
+ if encoding in self.compression_methods:
+ r['response'] = \
+ getattr(self, '_' + encoding)(r['response'])
+ r['headers'] += [('Content-Encoding', encoding)]
+ break
+ return await self._make_response(r, environ)
+
+ def start_background_task(self, target, *args, **kwargs):
+ """Start a background task using the appropriate async model.
+
+ This is a utility function that applications can use to start a
+ background task using the method that is compatible with the
+ selected async mode.
+
+ :param target: the target function to execute.
+ :param args: arguments to pass to the function.
+ :param kwargs: keyword arguments to pass to the function.
+
+ The return value is a ``asyncio.Task`` object.
+ """
+ return asyncio.ensure_future(target(*args, **kwargs))
+
+ async def sleep(self, seconds=0):
+ """Sleep for the requested amount of time using the appropriate async
+ model.
+
+ This is a utility function that applications can use to put a task to
+ sleep without having to worry about using the correct call for the
+ selected async mode.
+
+ Note: this method is a coroutine.
+ """
+ return await asyncio.sleep(seconds)
+
+ def create_queue(self, *args, **kwargs):
+ """Create a queue object using the appropriate async model.
+
+ This is a utility function that applications can use to create a queue
+ without having to worry about using the correct call for the selected
+ async mode. For asyncio based async modes, this returns an instance of
+ ``asyncio.Queue``.
+ """
+ return asyncio.Queue(*args, **kwargs)
+
+ def get_queue_empty_exception(self):
+ """Return the queue empty exception for the appropriate async model.
+
+ This is a utility function that applications can use to work with a
+ queue without having to worry about using the correct call for the
+ selected async mode. For asyncio based async modes, this returns the
+ ``asyncio.QueueEmpty`` exception class.
+ """
+ return asyncio.QueueEmpty
+
+ def create_event(self, *args, **kwargs):
+ """Create an event object using the appropriate async model.
+
+ This is a utility function that applications can use to create an
+ event without having to worry about using the correct call for the
+ selected async mode. For asyncio based async modes, this returns
+ an instance of ``asyncio.Event``.
+ """
+ return asyncio.Event(*args, **kwargs)
+
+ async def _make_response(self, response_dict, environ):
+ cors_headers = self._cors_headers(environ)
+ make_response = self._async['make_response']
+ if asyncio.iscoroutinefunction(make_response):
+ response = await make_response(
+ response_dict['status'],
+ response_dict['headers'] + cors_headers,
+ response_dict['response'], environ)
+ else:
+ response = make_response(
+ response_dict['status'],
+ response_dict['headers'] + cors_headers,
+ response_dict['response'], environ)
+ return response
+
+ async def _handle_connect(self, environ, transport, jsonp_index=None):
+ """Handle a client connection request."""
+ if self.start_service_task:
+ # start the service task to monitor connected clients
+ self.start_service_task = False
+ self.start_background_task(self._service_task)
+
+ sid = self.generate_id()
+ s = asyncio_socket.AsyncSocket(self, sid)
+ self.sockets[sid] = s
+
+ pkt = packet.Packet(
+ packet.OPEN, {'sid': sid,
+ 'upgrades': self._upgrades(sid, transport),
+ 'pingTimeout': int(self.ping_timeout * 1000),
+ 'pingInterval': int(self.ping_interval * 1000)})
+ await s.send(pkt)
+ s.schedule_ping()
+
+ ret = await self._trigger_event('connect', sid, environ,
+ run_async=False)
+ if ret is not None and ret is not True:
+ del self.sockets[sid]
+ self.logger.warning('Application rejected connection')
+ return self._unauthorized(ret or None)
+
+ if transport == 'websocket':
+ ret = await s.handle_get_request(environ)
+ if s.closed and sid in self.sockets:
+ # websocket connection ended, so we are done
+ del self.sockets[sid]
+ return ret
+ else:
+ s.connected = True
+ headers = None
+ if self.cookie:
+ if isinstance(self.cookie, dict):
+ headers = [(
+ 'Set-Cookie',
+ self._generate_sid_cookie(sid, self.cookie)
+ )]
+ else:
+ headers = [(
+ 'Set-Cookie',
+ self._generate_sid_cookie(sid, {
+ 'name': self.cookie, 'path': '/', 'SameSite': 'Lax'
+ })
+ )]
+ try:
+ return self._ok(await s.poll(), headers=headers,
+ jsonp_index=jsonp_index)
+ except exceptions.QueueEmpty:
+ return self._bad_request()
+
+ async def _trigger_event(self, event, *args, **kwargs):
+ """Invoke an event handler."""
+ run_async = kwargs.pop('run_async', False)
+ ret = None
+ if event in self.handlers:
+ if asyncio.iscoroutinefunction(self.handlers[event]) is True:
+ if run_async:
+ return self.start_background_task(self.handlers[event],
+ *args)
+ else:
+ try:
+ ret = await self.handlers[event](*args)
+ except asyncio.CancelledError: # pragma: no cover
+ pass
+ except:
+ self.logger.exception(event + ' async handler error')
+ if event == 'connect':
+ # if connect handler raised error we reject the
+ # connection
+ return False
+ else:
+ if run_async:
+ async def async_handler():
+ return self.handlers[event](*args)
+
+ return self.start_background_task(async_handler)
+ else:
+ try:
+ ret = self.handlers[event](*args)
+ except:
+ self.logger.exception(event + ' handler error')
+ if event == 'connect':
+ # if connect handler raised error we reject the
+ # connection
+ return False
+ return ret
+
+ async def _service_task(self): # pragma: no cover
+ """Monitor connected clients and clean up those that time out."""
+ while True:
+ if len(self.sockets) == 0:
+ # nothing to do
+ await self.sleep(self.ping_timeout)
+ continue
+
+ # go through the entire client list in a ping interval cycle
+ sleep_interval = self.ping_timeout / len(self.sockets)
+
+ try:
+ # iterate over the current clients
+ for socket in self.sockets.copy().values():
+ if not socket.closing and not socket.closed:
+ await socket.check_ping_timeout()
+ await self.sleep(sleep_interval)
+ except (
+ SystemExit,
+ KeyboardInterrupt,
+ asyncio.CancelledError,
+ GeneratorExit,
+ ):
+ self.logger.info('service task canceled')
+ break
+ except:
+ if asyncio.get_event_loop().is_closed():
+ self.logger.info('event loop is closed, exiting service '
+ 'task')
+ break
+
+ # an unexpected exception has occurred, log it and continue
+ self.logger.exception('service task exception')
diff --git a/matteo_env/Lib/site-packages/engineio/asyncio_socket.py b/matteo_env/Lib/site-packages/engineio/asyncio_socket.py
new file mode 100644
index 0000000..508ee3c
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/asyncio_socket.py
@@ -0,0 +1,245 @@
+import asyncio
+import sys
+import time
+
+from . import exceptions
+from . import packet
+from . import payload
+from . import socket
+
+
+class AsyncSocket(socket.Socket):
+ async def poll(self):
+ """Wait for packets to send to the client."""
+ try:
+ packets = [await asyncio.wait_for(
+ self.queue.get(),
+ self.server.ping_interval + self.server.ping_timeout)]
+ self.queue.task_done()
+ except (asyncio.TimeoutError, asyncio.CancelledError):
+ raise exceptions.QueueEmpty()
+ if packets == [None]:
+ return []
+ while True:
+ try:
+ pkt = self.queue.get_nowait()
+ self.queue.task_done()
+ if pkt is None:
+ self.queue.put_nowait(None)
+ break
+ packets.append(pkt)
+ except asyncio.QueueEmpty:
+ break
+ return packets
+
+ async def receive(self, pkt):
+ """Receive packet from the client."""
+ self.server.logger.info('%s: Received packet %s data %s',
+ self.sid, packet.packet_names[pkt.packet_type],
+ pkt.data if not isinstance(pkt.data, bytes)
+ else '')
+ if pkt.packet_type == packet.PONG:
+ self.schedule_ping()
+ elif pkt.packet_type == packet.MESSAGE:
+ await self.server._trigger_event(
+ 'message', self.sid, pkt.data,
+ run_async=self.server.async_handlers)
+ elif pkt.packet_type == packet.UPGRADE:
+ await self.send(packet.Packet(packet.NOOP))
+ elif pkt.packet_type == packet.CLOSE:
+ await self.close(wait=False, abort=True)
+ else:
+ raise exceptions.UnknownPacketError()
+
+ async def check_ping_timeout(self):
+ """Make sure the client is still sending pings."""
+ if self.closed:
+ raise exceptions.SocketIsClosedError()
+ if self.last_ping and \
+ time.time() - self.last_ping > self.server.ping_timeout:
+ self.server.logger.info('%s: Client is gone, closing socket',
+ self.sid)
+ # Passing abort=False here will cause close() to write a
+ # CLOSE packet. This has the effect of updating half-open sockets
+ # to their correct state of disconnected
+ await self.close(wait=False, abort=False)
+ return False
+ return True
+
+ async def send(self, pkt):
+ """Send a packet to the client."""
+ if not await self.check_ping_timeout():
+ return
+ else:
+ await self.queue.put(pkt)
+ self.server.logger.info('%s: Sending packet %s data %s',
+ self.sid, packet.packet_names[pkt.packet_type],
+ pkt.data if not isinstance(pkt.data, bytes)
+ else '')
+
+ async def handle_get_request(self, environ):
+ """Handle a long-polling GET request from the client."""
+ connections = [
+ s.strip()
+ for s in environ.get('HTTP_CONNECTION', '').lower().split(',')]
+ transport = environ.get('HTTP_UPGRADE', '').lower()
+ if 'upgrade' in connections and transport in self.upgrade_protocols:
+ self.server.logger.info('%s: Received request to upgrade to %s',
+ self.sid, transport)
+ return await getattr(self, '_upgrade_' + transport)(environ)
+ if self.upgrading or self.upgraded:
+ # we are upgrading to WebSocket, do not return any more packets
+ # through the polling endpoint
+ return [packet.Packet(packet.NOOP)]
+ try:
+ packets = await self.poll()
+ except exceptions.QueueEmpty:
+ exc = sys.exc_info()
+ await self.close(wait=False)
+ raise exc[1].with_traceback(exc[2])
+ return packets
+
+ async def handle_post_request(self, environ):
+ """Handle a long-polling POST request from the client."""
+ length = int(environ.get('CONTENT_LENGTH', '0'))
+ if length > self.server.max_http_buffer_size:
+ raise exceptions.ContentTooLongError()
+ else:
+ body = (await environ['wsgi.input'].read(length)).decode('utf-8')
+ p = payload.Payload(encoded_payload=body)
+ for pkt in p.packets:
+ await self.receive(pkt)
+
+ async def close(self, wait=True, abort=False):
+ """Close the socket connection."""
+ if not self.closed and not self.closing:
+ self.closing = True
+ await self.server._trigger_event('disconnect', self.sid)
+ if not abort:
+ await self.send(packet.Packet(packet.CLOSE))
+ self.closed = True
+ if wait:
+ await self.queue.join()
+
+ def schedule_ping(self):
+ async def send_ping():
+ self.last_ping = None
+ await asyncio.sleep(self.server.ping_interval)
+ if not self.closing and not self.closed:
+ self.last_ping = time.time()
+ await self.send(packet.Packet(packet.PING))
+
+ self.server.start_background_task(send_ping)
+
+ async def _upgrade_websocket(self, environ):
+ """Upgrade the connection from polling to websocket."""
+ if self.upgraded:
+ raise IOError('Socket has been upgraded already')
+ if self.server._async['websocket'] is None:
+ # the selected async mode does not support websocket
+ return self.server._bad_request()
+ ws = self.server._async['websocket'](self._websocket_handler)
+ return await ws(environ)
+
+ async def _websocket_handler(self, ws):
+ """Engine.IO handler for websocket transport."""
+ if self.connected:
+ # the socket was already connected, so this is an upgrade
+ self.upgrading = True # hold packet sends during the upgrade
+
+ try:
+ pkt = await ws.wait()
+ except IOError: # pragma: no cover
+ return
+ decoded_pkt = packet.Packet(encoded_packet=pkt)
+ if decoded_pkt.packet_type != packet.PING or \
+ decoded_pkt.data != 'probe':
+ self.server.logger.info(
+ '%s: Failed websocket upgrade, no PING packet', self.sid)
+ self.upgrading = False
+ return
+ await ws.send(packet.Packet(packet.PONG, data='probe').encode())
+ await self.queue.put(packet.Packet(packet.NOOP)) # end poll
+
+ try:
+ pkt = await ws.wait()
+ except IOError: # pragma: no cover
+ self.upgrading = False
+ return
+ decoded_pkt = packet.Packet(encoded_packet=pkt)
+ if decoded_pkt.packet_type != packet.UPGRADE:
+ self.upgraded = False
+ self.server.logger.info(
+ ('%s: Failed websocket upgrade, expected UPGRADE packet, '
+ 'received %s instead.'),
+ self.sid, pkt)
+ self.upgrading = False
+ return
+ self.upgraded = True
+ self.upgrading = False
+ else:
+ self.connected = True
+ self.upgraded = True
+
+ # start separate writer thread
+ async def writer():
+ while True:
+ packets = None
+ try:
+ packets = await self.poll()
+ except exceptions.QueueEmpty:
+ break
+ if not packets:
+ # empty packet list returned -> connection closed
+ break
+ try:
+ for pkt in packets:
+ await ws.send(pkt.encode())
+ except:
+ break
+ writer_task = asyncio.ensure_future(writer())
+
+ self.server.logger.info(
+ '%s: Upgrade to websocket successful', self.sid)
+
+ while True:
+ p = None
+ wait_task = asyncio.ensure_future(ws.wait())
+ try:
+ p = await asyncio.wait_for(
+ wait_task,
+ self.server.ping_interval + self.server.ping_timeout)
+ except asyncio.CancelledError: # pragma: no cover
+ # there is a bug (https://bugs.python.org/issue30508) in
+ # asyncio that causes a "Task exception never retrieved" error
+ # to appear when wait_task raises an exception before it gets
+ # cancelled. Calling wait_task.exception() prevents the error
+ # from being issued in Python 3.6, but causes other errors in
+ # other versions, so we run it with all errors suppressed and
+ # hope for the best.
+ try:
+ wait_task.exception()
+ except:
+ pass
+ break
+ except:
+ break
+ if p is None:
+ # connection closed by client
+ break
+ pkt = packet.Packet(encoded_packet=p)
+ try:
+ await self.receive(pkt)
+ except exceptions.UnknownPacketError: # pragma: no cover
+ pass
+ except exceptions.SocketIsClosedError: # pragma: no cover
+ self.server.logger.info('Receive error -- socket is closed')
+ break
+ except: # pragma: no cover
+ # if we get an unexpected exception we log the error and exit
+ # the connection properly
+ self.server.logger.exception('Unknown receive error')
+
+ await self.queue.put(None) # unlock the writer task so it can exit
+ await asyncio.wait_for(writer_task, timeout=None)
+ await self.close(wait=False, abort=True)
diff --git a/matteo_env/Lib/site-packages/engineio/client.py b/matteo_env/Lib/site-packages/engineio/client.py
new file mode 100644
index 0000000..524c1a6
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/client.py
@@ -0,0 +1,706 @@
+from base64 import b64encode
+from json import JSONDecodeError
+import logging
+try:
+ import queue
+except ImportError: # pragma: no cover
+ import Queue as queue
+import signal
+import ssl
+import threading
+import time
+import urllib
+
+try:
+ import requests
+except ImportError: # pragma: no cover
+ requests = None
+try:
+ import websocket
+except ImportError: # pragma: no cover
+ websocket = None
+from . import exceptions
+from . import packet
+from . import payload
+
+default_logger = logging.getLogger('engineio.client')
+connected_clients = []
+
+
+def signal_handler(sig, frame):
+ """SIGINT handler.
+
+ Disconnect all active clients and then invoke the original signal handler.
+ """
+ for client in connected_clients[:]:
+ if not client.is_asyncio_based():
+ client.disconnect()
+ if callable(original_signal_handler):
+ return original_signal_handler(sig, frame)
+ else: # pragma: no cover
+ # Handle case where no original SIGINT handler was present.
+ return signal.default_int_handler(sig, frame)
+
+
+original_signal_handler = None
+
+
+class Client(object):
+ """An Engine.IO client.
+
+ This class implements a fully compliant Engine.IO web client with support
+ for websocket and long-polling transports.
+
+ :param logger: To enable logging set to ``True`` or pass a logger object to
+ use. To disable logging set to ``False``. The default is
+ ``False``. Note that fatal errors are logged even when
+ ``logger`` is ``False``.
+ :param json: An alternative json module to use for encoding and decoding
+ packets. Custom json modules must have ``dumps`` and ``loads``
+ functions that are compatible with the standard library
+ versions.
+ :param request_timeout: A timeout in seconds for requests. The default is
+ 5 seconds.
+ :param http_session: an initialized ``requests.Session`` object to be used
+ when sending requests to the server. Use it if you
+ need to add special client options such as proxy
+ servers, SSL certificates, etc.
+ :param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to
+ skip SSL certificate verification, allowing
+ connections to servers with self signed certificates.
+ The default is ``True``.
+ """
+ event_names = ['connect', 'disconnect', 'message']
+
+ def __init__(self,
+ logger=False,
+ json=None,
+ request_timeout=5,
+ http_session=None,
+ ssl_verify=True):
+ global original_signal_handler
+ if original_signal_handler is None and \
+ threading.current_thread() == threading.main_thread():
+ original_signal_handler = signal.signal(signal.SIGINT,
+ signal_handler)
+ self.handlers = {}
+ self.base_url = None
+ self.transports = None
+ self.current_transport = None
+ self.sid = None
+ self.upgrades = None
+ self.ping_interval = None
+ self.ping_timeout = None
+ self.http = http_session
+ self.ws = None
+ self.read_loop_task = None
+ self.write_loop_task = None
+ self.queue = None
+ self.state = 'disconnected'
+ self.ssl_verify = ssl_verify
+
+ if json is not None:
+ packet.Packet.json = json
+ if not isinstance(logger, bool):
+ self.logger = logger
+ else:
+ self.logger = default_logger
+ if self.logger.level == logging.NOTSET:
+ if logger:
+ self.logger.setLevel(logging.INFO)
+ else:
+ self.logger.setLevel(logging.ERROR)
+ self.logger.addHandler(logging.StreamHandler())
+
+ self.request_timeout = request_timeout
+
+ def is_asyncio_based(self):
+ return False
+
+ def on(self, event, handler=None):
+ """Register an event handler.
+
+ :param event: The event name. Can be ``'connect'``, ``'message'`` or
+ ``'disconnect'``.
+ :param handler: The function that should be invoked to handle the
+ event. When this parameter is not given, the method
+ acts as a decorator for the handler function.
+
+ Example usage::
+
+ # as a decorator:
+ @eio.on('connect')
+ def connect_handler():
+ print('Connection request')
+
+ # as a method:
+ def message_handler(msg):
+ print('Received message: ', msg)
+ eio.send('response')
+ eio.on('message', message_handler)
+ """
+ if event not in self.event_names:
+ raise ValueError('Invalid event')
+
+ def set_handler(handler):
+ self.handlers[event] = handler
+ return handler
+
+ if handler is None:
+ return set_handler
+ set_handler(handler)
+
+ def connect(self, url, headers=None, transports=None,
+ engineio_path='engine.io'):
+ """Connect to an Engine.IO server.
+
+ :param url: The URL of the Engine.IO server. It can include custom
+ query string parameters if required by the server.
+ :param headers: A dictionary with custom headers to send with the
+ connection request.
+ :param transports: The list of allowed transports. Valid transports
+ are ``'polling'`` and ``'websocket'``. If not
+ given, the polling transport is connected first,
+ then an upgrade to websocket is attempted.
+ :param engineio_path: The endpoint where the Engine.IO server is
+ installed. The default value is appropriate for
+ most cases.
+
+ Example usage::
+
+ eio = engineio.Client()
+ eio.connect('http://localhost:5000')
+ """
+ if self.state != 'disconnected':
+ raise ValueError('Client is not in a disconnected state')
+ valid_transports = ['polling', 'websocket']
+ if transports is not None:
+ if isinstance(transports, str):
+ transports = [transports]
+ transports = [transport for transport in transports
+ if transport in valid_transports]
+ if not transports:
+ raise ValueError('No valid transports provided')
+ self.transports = transports or valid_transports
+ self.queue = self.create_queue()
+ return getattr(self, '_connect_' + self.transports[0])(
+ url, headers or {}, engineio_path)
+
+ def wait(self):
+ """Wait until the connection with the server ends.
+
+ Client applications can use this function to block the main thread
+ during the life of the connection.
+ """
+ if self.read_loop_task:
+ self.read_loop_task.join()
+
+ def send(self, data):
+ """Send a message to a client.
+
+ :param data: The data to send to the client. Data can be of type
+ ``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
+ or ``dict``, the data will be serialized as JSON.
+ """
+ self._send_packet(packet.Packet(packet.MESSAGE, data=data))
+
+ def disconnect(self, abort=False):
+ """Disconnect from the server.
+
+ :param abort: If set to ``True``, do not wait for background tasks
+ associated with the connection to end.
+ """
+ if self.state == 'connected':
+ self._send_packet(packet.Packet(packet.CLOSE))
+ self.queue.put(None)
+ self.state = 'disconnecting'
+ self._trigger_event('disconnect', run_async=False)
+ if self.current_transport == 'websocket':
+ self.ws.close()
+ if not abort:
+ self.read_loop_task.join()
+ self.state = 'disconnected'
+ try:
+ connected_clients.remove(self)
+ except ValueError: # pragma: no cover
+ pass
+ self._reset()
+
+ def transport(self):
+ """Return the name of the transport currently in use.
+
+ The possible values returned by this function are ``'polling'`` and
+ ``'websocket'``.
+ """
+ return self.current_transport
+
+ def start_background_task(self, target, *args, **kwargs):
+ """Start a background task.
+
+ This is a utility function that applications can use to start a
+ background task.
+
+ :param target: the target function to execute.
+ :param args: arguments to pass to the function.
+ :param kwargs: keyword arguments to pass to the function.
+
+ This function returns an object compatible with the `Thread` class in
+ the Python standard library. The `start()` method on this object is
+ already called by this function.
+ """
+ th = threading.Thread(target=target, args=args, kwargs=kwargs)
+ th.start()
+ return th
+
+ def sleep(self, seconds=0):
+ """Sleep for the requested amount of time."""
+ return time.sleep(seconds)
+
+ def create_queue(self, *args, **kwargs):
+ """Create a queue object."""
+ q = queue.Queue(*args, **kwargs)
+ q.Empty = queue.Empty
+ return q
+
+ def create_event(self, *args, **kwargs):
+ """Create an event object."""
+ return threading.Event(*args, **kwargs)
+
+ def _reset(self):
+ self.state = 'disconnected'
+ self.sid = None
+
+ def _connect_polling(self, url, headers, engineio_path):
+ """Establish a long-polling connection to the Engine.IO server."""
+ if requests is None: # pragma: no cover
+ # not installed
+ self.logger.error('requests package is not installed -- cannot '
+ 'send HTTP requests!')
+ return
+ self.base_url = self._get_engineio_url(url, engineio_path, 'polling')
+ self.logger.info('Attempting polling connection to ' + self.base_url)
+ r = self._send_request(
+ 'GET', self.base_url + self._get_url_timestamp(), headers=headers,
+ timeout=self.request_timeout)
+ if r is None:
+ self._reset()
+ raise exceptions.ConnectionError(
+ 'Connection refused by the server')
+ if r.status_code < 200 or r.status_code >= 300:
+ self._reset()
+ try:
+ arg = r.json()
+ except JSONDecodeError:
+ arg = None
+ raise exceptions.ConnectionError(
+ 'Unexpected status code {} in server response'.format(
+ r.status_code), arg)
+ try:
+ p = payload.Payload(encoded_payload=r.content.decode('utf-8'))
+ except ValueError:
+ raise exceptions.ConnectionError(
+ 'Unexpected response from server') from None
+ open_packet = p.packets[0]
+ if open_packet.packet_type != packet.OPEN:
+ raise exceptions.ConnectionError(
+ 'OPEN packet not returned by server')
+ self.logger.info(
+ 'Polling connection accepted with ' + str(open_packet.data))
+ self.sid = open_packet.data['sid']
+ self.upgrades = open_packet.data['upgrades']
+ self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
+ self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
+ self.current_transport = 'polling'
+ self.base_url += '&sid=' + self.sid
+
+ self.state = 'connected'
+ connected_clients.append(self)
+ self._trigger_event('connect', run_async=False)
+
+ for pkt in p.packets[1:]:
+ self._receive_packet(pkt)
+
+ if 'websocket' in self.upgrades and 'websocket' in self.transports:
+ # attempt to upgrade to websocket
+ if self._connect_websocket(url, headers, engineio_path):
+ # upgrade to websocket succeeded, we're done here
+ return
+
+ # start background tasks associated with this client
+ self.write_loop_task = self.start_background_task(self._write_loop)
+ self.read_loop_task = self.start_background_task(
+ self._read_loop_polling)
+
+ def _connect_websocket(self, url, headers, engineio_path):
+ """Establish or upgrade to a WebSocket connection with the server."""
+ if websocket is None: # pragma: no cover
+ # not installed
+ self.logger.warning('websocket-client package not installed, only '
+ 'polling transport is available')
+ return False
+ websocket_url = self._get_engineio_url(url, engineio_path, 'websocket')
+ if self.sid:
+ self.logger.info(
+ 'Attempting WebSocket upgrade to ' + websocket_url)
+ upgrade = True
+ websocket_url += '&sid=' + self.sid
+ else:
+ upgrade = False
+ self.base_url = websocket_url
+ self.logger.info(
+ 'Attempting WebSocket connection to ' + websocket_url)
+
+ # get cookies and other settings from the long-polling connection
+ # so that they are preserved when connecting to the WebSocket route
+ cookies = None
+ extra_options = {}
+ if self.http:
+ # cookies
+ cookies = '; '.join(["{}={}".format(cookie.name, cookie.value)
+ for cookie in self.http.cookies])
+ for header, value in headers.items():
+ if header.lower() == 'cookie':
+ if cookies:
+ cookies += '; '
+ cookies += value
+ del headers[header]
+ break
+
+ # auth
+ if 'Authorization' not in headers and self.http.auth is not None:
+ if not isinstance(self.http.auth, tuple): # pragma: no cover
+ raise ValueError('Only basic authentication is supported')
+ basic_auth = '{}:{}'.format(
+ self.http.auth[0], self.http.auth[1]).encode('utf-8')
+ basic_auth = b64encode(basic_auth).decode('utf-8')
+ headers['Authorization'] = 'Basic ' + basic_auth
+
+ # cert
+ # this can be given as ('certfile', 'keyfile') or just 'certfile'
+ if isinstance(self.http.cert, tuple):
+ extra_options['sslopt'] = {
+ 'certfile': self.http.cert[0],
+ 'keyfile': self.http.cert[1]}
+ elif self.http.cert:
+ extra_options['sslopt'] = {'certfile': self.http.cert}
+
+ # proxies
+ if self.http.proxies:
+ proxy_url = None
+ if websocket_url.startswith('ws://'):
+ proxy_url = self.http.proxies.get(
+ 'ws', self.http.proxies.get('http'))
+ else: # wss://
+ proxy_url = self.http.proxies.get(
+ 'wss', self.http.proxies.get('https'))
+ if proxy_url:
+ parsed_url = urllib.parse.urlparse(
+ proxy_url if '://' in proxy_url
+ else 'scheme://' + proxy_url)
+ extra_options['http_proxy_host'] = parsed_url.hostname
+ extra_options['http_proxy_port'] = parsed_url.port
+ extra_options['http_proxy_auth'] = (
+ (parsed_url.username, parsed_url.password)
+ if parsed_url.username or parsed_url.password
+ else None)
+
+ # verify
+ if not self.http.verify:
+ self.ssl_verify = False
+
+ if not self.ssl_verify:
+ extra_options['sslopt'] = {"cert_reqs": ssl.CERT_NONE}
+ try:
+ ws = websocket.create_connection(
+ websocket_url + self._get_url_timestamp(), header=headers,
+ cookie=cookies, enable_multithread=True, **extra_options)
+ except (ConnectionError, IOError, websocket.WebSocketException):
+ if upgrade:
+ self.logger.warning(
+ 'WebSocket upgrade failed: connection error')
+ return False
+ else:
+ raise exceptions.ConnectionError('Connection error')
+ if upgrade:
+ p = packet.Packet(packet.PING, data='probe').encode()
+ try:
+ ws.send(p)
+ except Exception as e: # pragma: no cover
+ self.logger.warning(
+ 'WebSocket upgrade failed: unexpected send exception: %s',
+ str(e))
+ return False
+ try:
+ p = ws.recv()
+ except Exception as e: # pragma: no cover
+ self.logger.warning(
+ 'WebSocket upgrade failed: unexpected recv exception: %s',
+ str(e))
+ return False
+ pkt = packet.Packet(encoded_packet=p)
+ if pkt.packet_type != packet.PONG or pkt.data != 'probe':
+ self.logger.warning(
+ 'WebSocket upgrade failed: no PONG packet')
+ return False
+ p = packet.Packet(packet.UPGRADE).encode()
+ try:
+ ws.send(p)
+ except Exception as e: # pragma: no cover
+ self.logger.warning(
+ 'WebSocket upgrade failed: unexpected send exception: %s',
+ str(e))
+ return False
+ self.current_transport = 'websocket'
+ self.logger.info('WebSocket upgrade was successful')
+ else:
+ try:
+ p = ws.recv()
+ except Exception as e: # pragma: no cover
+ raise exceptions.ConnectionError(
+ 'Unexpected recv exception: ' + str(e))
+ open_packet = packet.Packet(encoded_packet=p)
+ if open_packet.packet_type != packet.OPEN:
+ raise exceptions.ConnectionError('no OPEN packet')
+ self.logger.info(
+ 'WebSocket connection accepted with ' + str(open_packet.data))
+ self.sid = open_packet.data['sid']
+ self.upgrades = open_packet.data['upgrades']
+ self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
+ self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
+ self.current_transport = 'websocket'
+
+ self.state = 'connected'
+ connected_clients.append(self)
+ self._trigger_event('connect', run_async=False)
+ self.ws = ws
+ self.ws.settimeout(self.ping_interval + self.ping_timeout)
+
+ # start background tasks associated with this client
+ self.write_loop_task = self.start_background_task(self._write_loop)
+ self.read_loop_task = self.start_background_task(
+ self._read_loop_websocket)
+ return True
+
+ def _receive_packet(self, pkt):
+ """Handle incoming packets from the server."""
+ packet_name = packet.packet_names[pkt.packet_type] \
+ if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
+ self.logger.info(
+ 'Received packet %s data %s', packet_name,
+ pkt.data if not isinstance(pkt.data, bytes) else '')
+ if pkt.packet_type == packet.MESSAGE:
+ self._trigger_event('message', pkt.data, run_async=True)
+ elif pkt.packet_type == packet.PING:
+ self._send_packet(packet.Packet(packet.PONG, pkt.data))
+ elif pkt.packet_type == packet.CLOSE:
+ self.disconnect(abort=True)
+ elif pkt.packet_type == packet.NOOP:
+ pass
+ else:
+ self.logger.error('Received unexpected packet of type %s',
+ pkt.packet_type)
+
+ def _send_packet(self, pkt):
+ """Queue a packet to be sent to the server."""
+ if self.state != 'connected':
+ return
+ self.queue.put(pkt)
+ self.logger.info(
+ 'Sending packet %s data %s',
+ packet.packet_names[pkt.packet_type],
+ pkt.data if not isinstance(pkt.data, bytes) else '')
+
+ def _send_request(
+ self, method, url, headers=None, body=None,
+ timeout=None): # pragma: no cover
+ if self.http is None:
+ self.http = requests.Session()
+ try:
+ return self.http.request(method, url, headers=headers, data=body,
+ timeout=timeout, verify=self.ssl_verify)
+ except requests.exceptions.RequestException as exc:
+ self.logger.info('HTTP %s request to %s failed with error %s.',
+ method, url, exc)
+
+ def _trigger_event(self, event, *args, **kwargs):
+ """Invoke an event handler."""
+ run_async = kwargs.pop('run_async', False)
+ if event in self.handlers:
+ if run_async:
+ return self.start_background_task(self.handlers[event], *args)
+ else:
+ try:
+ return self.handlers[event](*args)
+ except:
+ self.logger.exception(event + ' handler error')
+
+ def _get_engineio_url(self, url, engineio_path, transport):
+ """Generate the Engine.IO connection URL."""
+ engineio_path = engineio_path.strip('/')
+ parsed_url = urllib.parse.urlparse(url)
+
+ if transport == 'polling':
+ scheme = 'http'
+ elif transport == 'websocket':
+ scheme = 'ws'
+ else: # pragma: no cover
+ raise ValueError('invalid transport')
+ if parsed_url.scheme in ['https', 'wss']:
+ scheme += 's'
+
+ return ('{scheme}://{netloc}/{path}/?{query}'
+ '{sep}transport={transport}&EIO=4').format(
+ scheme=scheme, netloc=parsed_url.netloc,
+ path=engineio_path, query=parsed_url.query,
+ sep='&' if parsed_url.query else '',
+ transport=transport)
+
+ def _get_url_timestamp(self):
+ """Generate the Engine.IO query string timestamp."""
+ return '&t=' + str(time.time())
+
+ def _read_loop_polling(self):
+ """Read packets by polling the Engine.IO server."""
+ while self.state == 'connected':
+ self.logger.info(
+ 'Sending polling GET request to ' + self.base_url)
+ r = self._send_request(
+ 'GET', self.base_url + self._get_url_timestamp(),
+ timeout=max(self.ping_interval, self.ping_timeout) + 5)
+ if r is None:
+ self.logger.warning(
+ 'Connection refused by the server, aborting')
+ self.queue.put(None)
+ break
+ if r.status_code < 200 or r.status_code >= 300:
+ self.logger.warning('Unexpected status code %s in server '
+ 'response, aborting', r.status_code)
+ self.queue.put(None)
+ break
+ try:
+ p = payload.Payload(encoded_payload=r.content.decode('utf-8'))
+ except ValueError:
+ self.logger.warning(
+ 'Unexpected packet from server, aborting')
+ self.queue.put(None)
+ break
+ for pkt in p.packets:
+ self._receive_packet(pkt)
+
+ self.logger.info('Waiting for write loop task to end')
+ self.write_loop_task.join()
+ if self.state == 'connected':
+ self._trigger_event('disconnect', run_async=False)
+ try:
+ connected_clients.remove(self)
+ except ValueError: # pragma: no cover
+ pass
+ self._reset()
+ self.logger.info('Exiting read loop task')
+
+ def _read_loop_websocket(self):
+ """Read packets from the Engine.IO WebSocket connection."""
+ while self.state == 'connected':
+ p = None
+ try:
+ p = self.ws.recv()
+ except websocket.WebSocketTimeoutException:
+ self.logger.warning(
+ 'Server has stopped communicating, aborting')
+ self.queue.put(None)
+ break
+ except websocket.WebSocketConnectionClosedException:
+ self.logger.warning(
+ 'WebSocket connection was closed, aborting')
+ self.queue.put(None)
+ break
+ except Exception as e:
+ self.logger.info(
+ 'Unexpected error receiving packet: "%s", aborting',
+ str(e))
+ self.queue.put(None)
+ break
+ try:
+ pkt = packet.Packet(encoded_packet=p)
+ except Exception as e: # pragma: no cover
+ self.logger.info(
+ 'Unexpected error decoding packet: "%s", aborting', str(e))
+ self.queue.put(None)
+ break
+ self._receive_packet(pkt)
+
+ self.logger.info('Waiting for write loop task to end')
+ self.write_loop_task.join()
+ if self.state == 'connected':
+ self._trigger_event('disconnect', run_async=False)
+ try:
+ connected_clients.remove(self)
+ except ValueError: # pragma: no cover
+ pass
+ self._reset()
+ self.logger.info('Exiting read loop task')
+
+ def _write_loop(self):
+ """This background task sends packages to the server as they are
+ pushed to the send queue.
+ """
+ while self.state == 'connected':
+ # to simplify the timeout handling, use the maximum of the
+ # ping interval and ping timeout as timeout, with an extra 5
+ # seconds grace period
+ timeout = max(self.ping_interval, self.ping_timeout) + 5
+ packets = None
+ try:
+ packets = [self.queue.get(timeout=timeout)]
+ except self.queue.Empty:
+ self.logger.error('packet queue is empty, aborting')
+ break
+ if packets == [None]:
+ self.queue.task_done()
+ packets = []
+ else:
+ while True:
+ try:
+ packets.append(self.queue.get(block=False))
+ except self.queue.Empty:
+ break
+ if packets[-1] is None:
+ packets = packets[:-1]
+ self.queue.task_done()
+ break
+ if not packets:
+ # empty packet list returned -> connection closed
+ break
+ if self.current_transport == 'polling':
+ p = payload.Payload(packets=packets)
+ r = self._send_request(
+ 'POST', self.base_url, body=p.encode(),
+ headers={'Content-Type': 'application/octet-stream'},
+ timeout=self.request_timeout)
+ for pkt in packets:
+ self.queue.task_done()
+ if r is None:
+ self.logger.warning(
+ 'Connection refused by the server, aborting')
+ break
+ if r.status_code < 200 or r.status_code >= 300:
+ self.logger.warning('Unexpected status code %s in server '
+ 'response, aborting', r.status_code)
+ self._reset()
+ break
+ else:
+ # websocket
+ try:
+ for pkt in packets:
+ encoded_packet = pkt.encode()
+ if pkt.binary:
+ self.ws.send_binary(encoded_packet)
+ else:
+ self.ws.send(encoded_packet)
+ self.queue.task_done()
+ except (websocket.WebSocketConnectionClosedException,
+ BrokenPipeError, OSError):
+ self.logger.warning(
+ 'WebSocket connection was closed, aborting')
+ break
+ self.logger.info('Exiting write loop task')
diff --git a/matteo_env/Lib/site-packages/engineio/exceptions.py b/matteo_env/Lib/site-packages/engineio/exceptions.py
new file mode 100644
index 0000000..fb0b3e0
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/exceptions.py
@@ -0,0 +1,22 @@
+class EngineIOError(Exception):
+ pass
+
+
+class ContentTooLongError(EngineIOError):
+ pass
+
+
+class UnknownPacketError(EngineIOError):
+ pass
+
+
+class QueueEmpty(EngineIOError):
+ pass
+
+
+class SocketIsClosedError(EngineIOError):
+ pass
+
+
+class ConnectionError(EngineIOError):
+ pass
diff --git a/matteo_env/Lib/site-packages/engineio/middleware.py b/matteo_env/Lib/site-packages/engineio/middleware.py
new file mode 100644
index 0000000..d0bdcc7
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/middleware.py
@@ -0,0 +1,87 @@
+import os
+from engineio.static_files import get_static_file
+
+
+class WSGIApp(object):
+ """WSGI application middleware for Engine.IO.
+
+ This middleware dispatches traffic to an Engine.IO application. It can
+ also serve a list of static files to the client, or forward unrelated
+ HTTP traffic to another WSGI application.
+
+ :param engineio_app: The Engine.IO server. Must be an instance of the
+ ``engineio.Server`` class.
+ :param wsgi_app: The WSGI app that receives all other traffic.
+ :param static_files: A dictionary with static file mapping rules. See the
+ documentation for details on this argument.
+ :param engineio_path: The endpoint where the Engine.IO application should
+ be installed. The default value is appropriate for
+ most cases.
+
+ Example usage::
+
+ import engineio
+ import eventlet
+
+ eio = engineio.Server()
+ app = engineio.WSGIApp(eio, static_files={
+ '/': {'content_type': 'text/html', 'filename': 'index.html'},
+ '/index.html': {'content_type': 'text/html',
+ 'filename': 'index.html'},
+ })
+ eventlet.wsgi.server(eventlet.listen(('', 8000)), app)
+ """
+ def __init__(self, engineio_app, wsgi_app=None, static_files=None,
+ engineio_path='engine.io'):
+ self.engineio_app = engineio_app
+ self.wsgi_app = wsgi_app
+ self.engineio_path = engineio_path.strip('/')
+ self.static_files = static_files or {}
+
+ def __call__(self, environ, start_response):
+ if 'gunicorn.socket' in environ:
+ # gunicorn saves the socket under environ['gunicorn.socket'], while
+ # eventlet saves it under environ['eventlet.input']. Eventlet also
+ # stores the socket inside a wrapper class, while gunicorn writes it
+ # directly into the environment. To give eventlet's WebSocket
+ # module access to this socket when running under gunicorn, here we
+ # copy the socket to the eventlet format.
+ class Input(object):
+ def __init__(self, socket):
+ self.socket = socket
+
+ def get_socket(self):
+ return self.socket
+
+ environ['eventlet.input'] = Input(environ['gunicorn.socket'])
+ path = environ['PATH_INFO']
+ if path is not None and \
+ path.startswith('/{0}/'.format(self.engineio_path)):
+ return self.engineio_app.handle_request(environ, start_response)
+ else:
+ static_file = get_static_file(path, self.static_files) \
+ if self.static_files else None
+ if static_file:
+ if os.path.exists(static_file['filename']):
+ start_response(
+ '200 OK',
+ [('Content-Type', static_file['content_type'])])
+ with open(static_file['filename'], 'rb') as f:
+ return [f.read()]
+ else:
+ return self.not_found(start_response)
+ elif self.wsgi_app is not None:
+ return self.wsgi_app(environ, start_response)
+ return self.not_found(start_response)
+
+ def not_found(self, start_response):
+ start_response("404 Not Found", [('Content-Type', 'text/plain')])
+ return [b'Not Found']
+
+
+class Middleware(WSGIApp):
+ """This class has been renamed to ``WSGIApp`` and is now deprecated."""
+ def __init__(self, engineio_app, wsgi_app=None,
+ engineio_path='engine.io'):
+ super(Middleware, self).__init__(engineio_app, wsgi_app,
+ engineio_path=engineio_path)
diff --git a/matteo_env/Lib/site-packages/engineio/packet.py b/matteo_env/Lib/site-packages/engineio/packet.py
new file mode 100644
index 0000000..9dbd6c6
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/packet.py
@@ -0,0 +1,72 @@
+import base64
+import json as _json
+
+(OPEN, CLOSE, PING, PONG, MESSAGE, UPGRADE, NOOP) = (0, 1, 2, 3, 4, 5, 6)
+packet_names = ['OPEN', 'CLOSE', 'PING', 'PONG', 'MESSAGE', 'UPGRADE', 'NOOP']
+
+binary_types = (bytes, bytearray)
+
+
+class Packet(object):
+ """Engine.IO packet."""
+
+ json = _json
+
+ def __init__(self, packet_type=NOOP, data=None, encoded_packet=None):
+ self.packet_type = packet_type
+ self.data = data
+ if isinstance(data, str):
+ self.binary = False
+ elif isinstance(data, binary_types):
+ self.binary = True
+ else:
+ self.binary = False
+ if self.binary and self.packet_type != MESSAGE:
+ raise ValueError('Binary packets can only be of type MESSAGE')
+ if encoded_packet:
+ self.decode(encoded_packet)
+
+ def encode(self, b64=False):
+ """Encode the packet for transmission."""
+ if self.binary:
+ if b64:
+ encoded_packet = 'b' + base64.b64encode(self.data).decode(
+ 'utf-8')
+ else:
+ encoded_packet = self.data
+ else:
+ encoded_packet = str(self.packet_type)
+ if isinstance(self.data, str):
+ encoded_packet += self.data
+ elif isinstance(self.data, dict) or isinstance(self.data, list):
+ encoded_packet += self.json.dumps(self.data,
+ separators=(',', ':'))
+ elif self.data is not None:
+ encoded_packet += str(self.data)
+ return encoded_packet
+
+ def decode(self, encoded_packet):
+ """Decode a transmitted package."""
+ self.binary = isinstance(encoded_packet, binary_types)
+ b64 = not self.binary and encoded_packet[0] == 'b'
+ if b64:
+ self.binary = True
+ self.packet_type = MESSAGE
+ self.data = base64.b64decode(encoded_packet[1:])
+ else:
+ if self.binary and not isinstance(encoded_packet, bytes):
+ encoded_packet = bytes(encoded_packet)
+ if self.binary:
+ self.packet_type = MESSAGE
+ self.data = encoded_packet
+ else:
+ self.packet_type = int(encoded_packet[0])
+ try:
+ self.data = self.json.loads(encoded_packet[1:])
+ if isinstance(self.data, int):
+ # do not allow integer payloads, see
+ # github.com/miguelgrinberg/python-engineio/issues/75
+ # for background on this decision
+ raise ValueError
+ except ValueError:
+ self.data = encoded_packet[1:]
diff --git a/matteo_env/Lib/site-packages/engineio/payload.py b/matteo_env/Lib/site-packages/engineio/payload.py
new file mode 100644
index 0000000..f0e9e34
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/payload.py
@@ -0,0 +1,46 @@
+import urllib
+
+from . import packet
+
+
+class Payload(object):
+ """Engine.IO payload."""
+ max_decode_packets = 16
+
+ def __init__(self, packets=None, encoded_payload=None):
+ self.packets = packets or []
+ if encoded_payload is not None:
+ self.decode(encoded_payload)
+
+ def encode(self, jsonp_index=None):
+ """Encode the payload for transmission."""
+ encoded_payload = ''
+ for pkt in self.packets:
+ if encoded_payload:
+ encoded_payload += '\x1e'
+ encoded_payload += pkt.encode(b64=True)
+ if jsonp_index is not None:
+ encoded_payload = '___eio[' + \
+ str(jsonp_index) + \
+ ']("' + \
+ encoded_payload.replace('"', '\\"') + \
+ '");'
+ return encoded_payload
+
+ def decode(self, encoded_payload):
+ """Decode a transmitted payload."""
+ self.packets = []
+
+ if len(encoded_payload) == 0:
+ return
+
+ # JSONP POST payload starts with 'd='
+ if encoded_payload.startswith('d='):
+ encoded_payload = urllib.parse.parse_qs(
+ encoded_payload)['d'][0]
+
+ encoded_packets = encoded_payload.split('\x1e')
+ if len(encoded_packets) > self.max_decode_packets:
+ raise ValueError('Too many packets in payload')
+ self.packets = [packet.Packet(encoded_packet=encoded_packet)
+ for encoded_packet in encoded_packets]
diff --git a/matteo_env/Lib/site-packages/engineio/server.py b/matteo_env/Lib/site-packages/engineio/server.py
new file mode 100644
index 0000000..703e7e9
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/server.py
@@ -0,0 +1,738 @@
+import base64
+import gzip
+import importlib
+import io
+import logging
+import secrets
+import urllib
+import zlib
+
+from . import exceptions
+from . import packet
+from . import payload
+from . import socket
+
+default_logger = logging.getLogger('engineio.server')
+
+
+class Server(object):
+ """An Engine.IO server.
+
+ This class implements a fully compliant Engine.IO web server with support
+ for websocket and long-polling transports.
+
+ :param async_mode: The asynchronous model to use. See the Deployment
+ section in the documentation for a description of the
+ available options. Valid async modes are "threading",
+ "eventlet", "gevent" and "gevent_uwsgi". If this
+ argument is not given, "eventlet" is tried first, then
+ "gevent_uwsgi", then "gevent", and finally "threading".
+ The first async mode that has all its dependencies
+ installed is the one that is chosen.
+ :param ping_interval: The interval in seconds at which the server pings
+ the client. The default is 25 seconds. For advanced
+ control, a two element tuple can be given, where
+ the first number is the ping interval and the second
+ is a grace period added by the server.
+ :param ping_timeout: The time in seconds that the client waits for the
+ server to respond before disconnecting. The default
+ is 5 seconds.
+ :param max_http_buffer_size: The maximum size of a message when using the
+ polling transport. The default is 1,000,000
+ bytes.
+ :param allow_upgrades: Whether to allow transport upgrades or not. The
+ default is ``True``.
+ :param http_compression: Whether to compress packages when using the
+ polling transport. The default is ``True``.
+ :param compression_threshold: Only compress messages when their byte size
+ is greater than this value. The default is
+ 1024 bytes.
+ :param cookie: If set to a string, it is the name of the HTTP cookie the
+ server sends back to the client containing the client
+ session id. If set to a dictionary, the ``'name'`` key
+ contains the cookie name and other keys define cookie
+ attributes, where the value of each attribute can be a
+ string, a callable with no arguments, or a boolean. If set
+ to ``None`` (the default), a cookie is not sent to the
+ client.
+ :param cors_allowed_origins: Origin or list of origins that are allowed to
+ connect to this server. Only the same origin
+ is allowed by default. Set this argument to
+ ``'*'`` to allow all origins, or to ``[]`` to
+ disable CORS handling.
+ :param cors_credentials: Whether credentials (cookies, authentication) are
+ allowed in requests to this server. The default
+ is ``True``.
+ :param logger: To enable logging set to ``True`` or pass a logger object to
+ use. To disable logging set to ``False``. The default is
+ ``False``. Note that fatal errors are logged even when
+ ``logger`` is ``False``.
+ :param json: An alternative json module to use for encoding and decoding
+ packets. Custom json modules must have ``dumps`` and ``loads``
+ functions that are compatible with the standard library
+ versions.
+ :param async_handlers: If set to ``True``, run message event handlers in
+ non-blocking threads. To run handlers synchronously,
+ set to ``False``. The default is ``True``.
+ :param monitor_clients: If set to ``True``, a background task will ensure
+ inactive clients are closed. Set to ``False`` to
+ disable the monitoring task (not recommended). The
+ default is ``True``.
+ :param kwargs: Reserved for future extensions, any additional parameters
+ given as keyword arguments will be silently ignored.
+ """
+ compression_methods = ['gzip', 'deflate']
+ event_names = ['connect', 'disconnect', 'message']
+ _default_monitor_clients = True
+ sequence_number = 0
+
+ def __init__(self, async_mode=None, ping_interval=25, ping_timeout=5,
+ max_http_buffer_size=1000000, allow_upgrades=True,
+ http_compression=True, compression_threshold=1024,
+ cookie=None, cors_allowed_origins=None,
+ cors_credentials=True, logger=False, json=None,
+ async_handlers=True, monitor_clients=None, **kwargs):
+ self.ping_timeout = ping_timeout
+ if isinstance(ping_interval, tuple):
+ self.ping_interval = ping_interval[0]
+ self.ping_interval_grace_period = ping_interval[1]
+ else:
+ self.ping_interval = ping_interval
+ self.ping_interval_grace_period = 0
+ self.max_http_buffer_size = max_http_buffer_size
+ self.allow_upgrades = allow_upgrades
+ self.http_compression = http_compression
+ self.compression_threshold = compression_threshold
+ self.cookie = cookie
+ self.cors_allowed_origins = cors_allowed_origins
+ self.cors_credentials = cors_credentials
+ self.async_handlers = async_handlers
+ self.sockets = {}
+ self.handlers = {}
+ self.log_message_keys = set()
+ self.start_service_task = monitor_clients \
+ if monitor_clients is not None else self._default_monitor_clients
+ if json is not None:
+ packet.Packet.json = json
+ if not isinstance(logger, bool):
+ self.logger = logger
+ else:
+ self.logger = default_logger
+ if self.logger.level == logging.NOTSET:
+ if logger:
+ self.logger.setLevel(logging.INFO)
+ else:
+ self.logger.setLevel(logging.ERROR)
+ self.logger.addHandler(logging.StreamHandler())
+ modes = self.async_modes()
+ if async_mode is not None:
+ modes = [async_mode] if async_mode in modes else []
+ self._async = None
+ self.async_mode = None
+ for mode in modes:
+ try:
+ self._async = importlib.import_module(
+ 'engineio.async_drivers.' + mode)._async
+ asyncio_based = self._async['asyncio'] \
+ if 'asyncio' in self._async else False
+ if asyncio_based != self.is_asyncio_based():
+ continue # pragma: no cover
+ self.async_mode = mode
+ break
+ except ImportError:
+ pass
+ if self.async_mode is None:
+ raise ValueError('Invalid async_mode specified')
+ if self.is_asyncio_based() and \
+ ('asyncio' not in self._async or not
+ self._async['asyncio']): # pragma: no cover
+ raise ValueError('The selected async_mode is not asyncio '
+ 'compatible')
+ if not self.is_asyncio_based() and 'asyncio' in self._async and \
+ self._async['asyncio']: # pragma: no cover
+ raise ValueError('The selected async_mode requires asyncio and '
+ 'must use the AsyncServer class')
+ self.logger.info('Server initialized for %s.', self.async_mode)
+
+ def is_asyncio_based(self):
+ return False
+
+ def async_modes(self):
+ return ['eventlet', 'gevent_uwsgi', 'gevent', 'threading']
+
+ def on(self, event, handler=None):
+ """Register an event handler.
+
+ :param event: The event name. Can be ``'connect'``, ``'message'`` or
+ ``'disconnect'``.
+ :param handler: The function that should be invoked to handle the
+ event. When this parameter is not given, the method
+ acts as a decorator for the handler function.
+
+ Example usage::
+
+ # as a decorator:
+ @eio.on('connect')
+ def connect_handler(sid, environ):
+ print('Connection request')
+ if environ['REMOTE_ADDR'] in blacklisted:
+ return False # reject
+
+ # as a method:
+ def message_handler(sid, msg):
+ print('Received message: ', msg)
+ eio.send(sid, 'response')
+ eio.on('message', message_handler)
+
+ The handler function receives the ``sid`` (session ID) for the
+ client as first argument. The ``'connect'`` event handler receives the
+ WSGI environment as a second argument, and can return ``False`` to
+ reject the connection. The ``'message'`` handler receives the message
+ payload as a second argument. The ``'disconnect'`` handler does not
+ take a second argument.
+ """
+ if event not in self.event_names:
+ raise ValueError('Invalid event')
+
+ def set_handler(handler):
+ self.handlers[event] = handler
+ return handler
+
+ if handler is None:
+ return set_handler
+ set_handler(handler)
+
+ def send(self, sid, data):
+ """Send a message to a client.
+
+ :param sid: The session id of the recipient client.
+ :param data: The data to send to the client. Data can be of type
+ ``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
+ or ``dict``, the data will be serialized as JSON.
+ """
+ try:
+ socket = self._get_socket(sid)
+ except KeyError:
+ # the socket is not available
+ self.logger.warning('Cannot send to sid %s', sid)
+ return
+ socket.send(packet.Packet(packet.MESSAGE, data=data))
+
+ def get_session(self, sid):
+ """Return the user session for a client.
+
+ :param sid: The session id of the client.
+
+ The return value is a dictionary. Modifications made to this
+ dictionary are not guaranteed to be preserved unless
+ ``save_session()`` is called, or when the ``session`` context manager
+ is used.
+ """
+ socket = self._get_socket(sid)
+ return socket.session
+
+ def save_session(self, sid, session):
+ """Store the user session for a client.
+
+ :param sid: The session id of the client.
+ :param session: The session dictionary.
+ """
+ socket = self._get_socket(sid)
+ socket.session = session
+
    def session(self, sid):
        """Return the user session for a client with context manager syntax.

        :param sid: The session id of the client.

        This is a context manager that returns the user session dictionary for
        the client. Any changes that are made to this dictionary inside the
        context manager block are saved back to the session. Example usage::

            @eio.on('connect')
            def on_connect(sid, environ):
                username = authenticate_user(environ)
                if not username:
                    return False
                with eio.session(sid) as session:
                    session['username'] = username

            @eio.on('message')
            def on_message(sid, msg):
                with eio.session(sid) as session:
                    print('received message from ', session['username'])
        """
        class _session_context_manager(object):
            # Helper that loads the session on entry and persists it on exit.
            def __init__(self, server, sid):
                self.server = server
                self.sid = sid
                self.session = None

            def __enter__(self):
                # NOTE: ``sid`` here is the argument of the enclosing
                # ``session()`` call, captured by closure; ``self.sid`` is
                # stored but never read.
                self.session = self.server.get_session(sid)
                return self.session

            def __exit__(self, *args):
                # persist whatever the ``with`` body did to the dictionary
                self.server.save_session(sid, self.session)

        return _session_context_manager(self, sid)
+
+ def disconnect(self, sid=None):
+ """Disconnect a client.
+
+ :param sid: The session id of the client to close. If this parameter
+ is not given, then all clients are closed.
+ """
+ if sid is not None:
+ try:
+ socket = self._get_socket(sid)
+ except KeyError: # pragma: no cover
+ # the socket was already closed or gone
+ pass
+ else:
+ socket.close()
+ if sid in self.sockets: # pragma: no cover
+ del self.sockets[sid]
+ else:
+ for client in self.sockets.values():
+ client.close()
+ self.sockets = {}
+
+ def transport(self, sid):
+ """Return the name of the transport used by the client.
+
+ The two possible values returned by this function are ``'polling'``
+ and ``'websocket'``.
+
+ :param sid: The session of the client.
+ """
+ return 'websocket' if self._get_socket(sid).upgraded else 'polling'
+
+ def handle_request(self, environ, start_response):
+ """Handle an HTTP request from the client.
+
+ This is the entry point of the Engine.IO application, using the same
+ interface as a WSGI application. For the typical usage, this function
+ is invoked by the :class:`Middleware` instance, but it can be invoked
+ directly when the middleware is not used.
+
+ :param environ: The WSGI environment.
+ :param start_response: The WSGI ``start_response`` function.
+
+ This function returns the HTTP response body to deliver to the client
+ as a byte sequence.
+ """
+ if self.cors_allowed_origins != []:
+ # Validate the origin header if present
+ # This is important for WebSocket more than for HTTP, since
+ # browsers only apply CORS controls to HTTP.
+ origin = environ.get('HTTP_ORIGIN')
+ if origin:
+ allowed_origins = self._cors_allowed_origins(environ)
+ if allowed_origins is not None and origin not in \
+ allowed_origins:
+ self._log_error_once(
+ origin + ' is not an accepted origin.', 'bad-origin')
+ r = self._bad_request(
+ origin + ' is not an accepted origin.')
+ start_response(r['status'], r['headers'])
+ return [r['response']]
+
+ method = environ['REQUEST_METHOD']
+ query = urllib.parse.parse_qs(environ.get('QUERY_STRING', ''))
+ jsonp = False
+ jsonp_index = None
+
+ # make sure the client speaks a compatible Engine.IO version
+ sid = query['sid'][0] if 'sid' in query else None
+ if sid is None and query.get('EIO') != ['4']:
+ self._log_error_once(
+ 'The client is using an unsupported version of the Socket.IO '
+ 'or Engine.IO protocols', 'bad-version')
+ r = self._bad_request(
+ 'The client is using an unsupported version of the Socket.IO '
+ 'or Engine.IO protocols')
+ start_response(r['status'], r['headers'])
+ return [r['response']]
+
+ if 'j' in query:
+ jsonp = True
+ try:
+ jsonp_index = int(query['j'][0])
+ except (ValueError, KeyError, IndexError):
+ # Invalid JSONP index number
+ pass
+
+ if jsonp and jsonp_index is None:
+ self._log_error_once('Invalid JSONP index number',
+ 'bad-jsonp-index')
+ r = self._bad_request('Invalid JSONP index number')
+ elif method == 'GET':
+ if sid is None:
+ transport = query.get('transport', ['polling'])[0]
+ # transport must be one of 'polling' or 'websocket'.
+ # if 'websocket', the HTTP_UPGRADE header must match.
+ upgrade_header = environ.get('HTTP_UPGRADE').lower() \
+ if 'HTTP_UPGRADE' in environ else None
+ if transport == 'polling' \
+ or transport == upgrade_header == 'websocket':
+ r = self._handle_connect(environ, start_response,
+ transport, jsonp_index)
+ else:
+ self._log_error_once('Invalid transport ' + transport,
+ 'bad-transport')
+ r = self._bad_request('Invalid transport ' + transport)
+ else:
+ if sid not in self.sockets:
+ self._log_error_once('Invalid session ' + sid, 'bad-sid')
+ r = self._bad_request('Invalid session ' + sid)
+ else:
+ socket = self._get_socket(sid)
+ try:
+ packets = socket.handle_get_request(
+ environ, start_response)
+ if isinstance(packets, list):
+ r = self._ok(packets, jsonp_index=jsonp_index)
+ else:
+ r = packets
+ except exceptions.EngineIOError:
+ if sid in self.sockets: # pragma: no cover
+ self.disconnect(sid)
+ r = self._bad_request()
+ if sid in self.sockets and self.sockets[sid].closed:
+ del self.sockets[sid]
+ elif method == 'POST':
+ if sid is None or sid not in self.sockets:
+ self._log_error_once('Invalid session ' + sid, 'bad-sid')
+ r = self._bad_request('Invalid session ' + sid)
+ else:
+ socket = self._get_socket(sid)
+ try:
+ socket.handle_post_request(environ)
+ r = self._ok(jsonp_index=jsonp_index)
+ except exceptions.EngineIOError:
+ if sid in self.sockets: # pragma: no cover
+ self.disconnect(sid)
+ r = self._bad_request()
+ except: # pragma: no cover
+ # for any other unexpected errors, we log the error
+ # and keep going
+ self.logger.exception('post request handler error')
+ r = self._ok(jsonp_index=jsonp_index)
+ elif method == 'OPTIONS':
+ r = self._ok()
+ else:
+ self.logger.warning('Method %s not supported', method)
+ r = self._method_not_found()
+
+ if not isinstance(r, dict):
+ return r or []
+ if self.http_compression and \
+ len(r['response']) >= self.compression_threshold:
+ encodings = [e.split(';')[0].strip() for e in
+ environ.get('HTTP_ACCEPT_ENCODING', '').split(',')]
+ for encoding in encodings:
+ if encoding in self.compression_methods:
+ r['response'] = \
+ getattr(self, '_' + encoding)(r['response'])
+ r['headers'] += [('Content-Encoding', encoding)]
+ break
+ cors_headers = self._cors_headers(environ)
+ start_response(r['status'], r['headers'] + cors_headers)
+ return [r['response']]
+
    def start_background_task(self, target, *args, **kwargs):
        """Start a background task using the appropriate async model.

        This is a utility function that applications can use to start a
        background task using the method that is compatible with the
        selected async mode.

        :param target: the target function to execute.
        :param args: arguments to pass to the function.
        :param kwargs: keyword arguments to pass to the function.

        This function returns an object compatible with the `Thread` class in
        the Python standard library. The `start()` method on this object is
        already called by this function.
        """
        # ``self._async['thread']`` is the Thread-compatible class registered
        # for the selected async mode
        th = self._async['thread'](target=target, args=args, kwargs=kwargs)
        th.start()
        return th # pragma: no cover
+
    def sleep(self, seconds=0):
        """Sleep for the requested amount of time using the appropriate async
        model.

        This is a utility function that applications can use to put a task to
        sleep without having to worry about using the correct call for the
        selected async mode.
        """
        # delegate to the sleep function registered for the async mode
        return self._async['sleep'](seconds)

    def create_queue(self, *args, **kwargs):
        """Create a queue object using the appropriate async model.

        This is a utility function that applications can use to create a queue
        without having to worry about using the correct call for the selected
        async mode.
        """
        return self._async['queue'](*args, **kwargs)

    def get_queue_empty_exception(self):
        """Return the queue empty exception for the appropriate async model.

        This is a utility function that applications can use to work with a
        queue without having to worry about using the correct call for the
        selected async mode.
        """
        # note: returns the exception class itself, not an instance
        return self._async['queue_empty']

    def create_event(self, *args, **kwargs):
        """Create an event object using the appropriate async model.

        This is a utility function that applications can use to create an
        event without having to worry about using the correct call for the
        selected async mode.
        """
        return self._async['event'](*args, **kwargs)
+
+ def generate_id(self):
+ """Generate a unique session id."""
+ id = base64.b64encode(
+ secrets.token_bytes(12) + self.sequence_number.to_bytes(3, 'big'))
+ self.sequence_number = (self.sequence_number + 1) & 0xffffff
+ return id.decode('utf-8').replace('/', '_').replace('+', '-')
+
+ def _generate_sid_cookie(self, sid, attributes):
+ """Generate the sid cookie."""
+ cookie = attributes.get('name', 'io') + '=' + sid
+ for attribute, value in attributes.items():
+ if attribute == 'name':
+ continue
+ if callable(value):
+ value = value()
+ if value is True:
+ cookie += '; ' + attribute
+ else:
+ cookie += '; ' + attribute + '=' + value
+ return cookie
+
    def _handle_connect(self, environ, start_response, transport,
                        jsonp_index=None):
        """Handle a client connection request."""
        if self.start_service_task:
            # start the service task to monitor connected clients
            self.start_service_task = False
            self.start_background_task(self._service_task)

        sid = self.generate_id()
        s = socket.Socket(self, sid)
        self.sockets[sid] = s

        # handshake packet: advertises the session id, the transports the
        # client may upgrade to, and the ping configuration in milliseconds
        pkt = packet.Packet(packet.OPEN, {
            'sid': sid,
            'upgrades': self._upgrades(sid, transport),
            'pingTimeout': int(self.ping_timeout * 1000),
            'pingInterval': int(
                self.ping_interval + self.ping_interval_grace_period) * 1000})
        s.send(pkt)
        s.schedule_ping()

        # NOTE: some sections below are marked as "no cover" to workaround
        # what seems to be a bug in the coverage package. All the lines below
        # are covered by tests, but some are not reported as such for some
        # reason
        ret = self._trigger_event('connect', sid, environ, run_async=False)
        if ret is not None and ret is not True: # pragma: no cover
            # the application's connect handler rejected this client
            del self.sockets[sid]
            self.logger.warning('Application rejected connection')
            return self._unauthorized(ret or None)

        if transport == 'websocket': # pragma: no cover
            ret = s.handle_get_request(environ, start_response)
            if s.closed and sid in self.sockets:
                # websocket connection ended, so we are done
                del self.sockets[sid]
            return ret
        else: # pragma: no cover
            s.connected = True
            headers = None
            if self.cookie:
                # attach the configured session id cookie to the response
                if isinstance(self.cookie, dict):
                    headers = [(
                        'Set-Cookie',
                        self._generate_sid_cookie(sid, self.cookie)
                    )]
                else:
                    headers = [(
                        'Set-Cookie',
                        self._generate_sid_cookie(sid, {
                            'name': self.cookie, 'path': '/', 'SameSite': 'Lax'
                        })
                    )]
            try:
                return self._ok(s.poll(), headers=headers,
                                jsonp_index=jsonp_index)
            except exceptions.QueueEmpty:
                return self._bad_request()
+
+ def _upgrades(self, sid, transport):
+ """Return the list of possible upgrades for a client connection."""
+ if not self.allow_upgrades or self._get_socket(sid).upgraded or \
+ self._async['websocket'] is None or transport == 'websocket':
+ return []
+ return ['websocket']
+
+ def _trigger_event(self, event, *args, **kwargs):
+ """Invoke an event handler."""
+ run_async = kwargs.pop('run_async', False)
+ if event in self.handlers:
+ if run_async:
+ return self.start_background_task(self.handlers[event], *args)
+ else:
+ try:
+ return self.handlers[event](*args)
+ except:
+ self.logger.exception(event + ' handler error')
+ if event == 'connect':
+ # if connect handler raised error we reject the
+ # connection
+ return False
+
+ def _get_socket(self, sid):
+ """Return the socket object for a given session."""
+ try:
+ s = self.sockets[sid]
+ except KeyError:
+ raise KeyError('Session not found')
+ if s.closed:
+ del self.sockets[sid]
+ raise KeyError('Session is disconnected')
+ return s
+
+ def _ok(self, packets=None, headers=None, jsonp_index=None):
+ """Generate a successful HTTP response."""
+ if packets is not None:
+ if headers is None:
+ headers = []
+ headers += [('Content-Type', 'text/plain; charset=UTF-8')]
+ return {'status': '200 OK',
+ 'headers': headers,
+ 'response': payload.Payload(packets=packets).encode(
+ jsonp_index=jsonp_index).encode('utf-8')}
+ else:
+ return {'status': '200 OK',
+ 'headers': [('Content-Type', 'text/plain')],
+ 'response': b'OK'}
+
+ def _bad_request(self, message=None):
+ """Generate a bad request HTTP error response."""
+ if message is None:
+ message = 'Bad Request'
+ message = packet.Packet.json.dumps(message)
+ return {'status': '400 BAD REQUEST',
+ 'headers': [('Content-Type', 'text/plain')],
+ 'response': message.encode('utf-8')}
+
+ def _method_not_found(self):
+ """Generate a method not found HTTP error response."""
+ return {'status': '405 METHOD NOT FOUND',
+ 'headers': [('Content-Type', 'text/plain')],
+ 'response': b'Method Not Found'}
+
+ def _unauthorized(self, message=None):
+ """Generate a unauthorized HTTP error response."""
+ if message is None:
+ message = 'Unauthorized'
+ message = packet.Packet.json.dumps(message)
+ return {'status': '401 UNAUTHORIZED',
+ 'headers': [('Content-Type', 'application/json')],
+ 'response': message.encode('utf-8')}
+
+ def _cors_allowed_origins(self, environ):
+ default_origins = []
+ if 'wsgi.url_scheme' in environ and 'HTTP_HOST' in environ:
+ default_origins.append('{scheme}://{host}'.format(
+ scheme=environ['wsgi.url_scheme'], host=environ['HTTP_HOST']))
+ if 'HTTP_X_FORWARDED_HOST' in environ:
+ scheme = environ.get(
+ 'HTTP_X_FORWARDED_PROTO',
+ environ['wsgi.url_scheme']).split(',')[0].strip()
+ default_origins.append('{scheme}://{host}'.format(
+ scheme=scheme, host=environ['HTTP_X_FORWARDED_HOST'].split(
+ ',')[0].strip()))
+ if self.cors_allowed_origins is None:
+ allowed_origins = default_origins
+ elif self.cors_allowed_origins == '*':
+ allowed_origins = None
+ elif isinstance(self.cors_allowed_origins, str):
+ allowed_origins = [self.cors_allowed_origins]
+ else:
+ allowed_origins = self.cors_allowed_origins
+ return allowed_origins
+
+ def _cors_headers(self, environ):
+ """Return the cross-origin-resource-sharing headers."""
+ if self.cors_allowed_origins == []:
+ # special case, CORS handling is completely disabled
+ return []
+ headers = []
+ allowed_origins = self._cors_allowed_origins(environ)
+ if 'HTTP_ORIGIN' in environ and \
+ (allowed_origins is None or environ['HTTP_ORIGIN'] in
+ allowed_origins):
+ headers = [('Access-Control-Allow-Origin', environ['HTTP_ORIGIN'])]
+ if environ['REQUEST_METHOD'] == 'OPTIONS':
+ headers += [('Access-Control-Allow-Methods', 'OPTIONS, GET, POST')]
+ if 'HTTP_ACCESS_CONTROL_REQUEST_HEADERS' in environ:
+ headers += [('Access-Control-Allow-Headers',
+ environ['HTTP_ACCESS_CONTROL_REQUEST_HEADERS'])]
+ if self.cors_credentials:
+ headers += [('Access-Control-Allow-Credentials', 'true')]
+ return headers
+
+ def _gzip(self, response):
+ """Apply gzip compression to a response."""
+ bytesio = io.BytesIO()
+ with gzip.GzipFile(fileobj=bytesio, mode='w') as gz:
+ gz.write(response)
+ return bytesio.getvalue()
+
+ def _deflate(self, response):
+ """Apply deflate compression to a response."""
+ return zlib.compress(response)
+
+ def _log_error_once(self, message, message_key):
+ """Log message with logging.ERROR level the first time, then log
+ with given level."""
+ if message_key not in self.log_message_keys:
+ self.logger.error(message + ' (further occurrences of this error '
+ 'will be logged with level INFO)')
+ self.log_message_keys.add(message_key)
+ else:
+ self.logger.info(message)
+
    def _service_task(self): # pragma: no cover
        """Monitor connected clients and clean up those that time out.

        Runs forever in a background task; exits only on SystemExit or
        KeyboardInterrupt.
        """
        while True:
            if len(self.sockets) == 0:
                # nothing to do
                self.sleep(self.ping_timeout)
                continue

            # go through the entire client list in a ping interval cycle
            sleep_interval = float(self.ping_timeout) / len(self.sockets)

            try:
                # iterate over the current clients
                for s in self.sockets.copy().values():
                    if not s.closing and not s.closed:
                        s.check_ping_timeout()
                    # sleeping per client spreads one full pass over
                    # roughly ping_timeout seconds
                    self.sleep(sleep_interval)
            except (SystemExit, KeyboardInterrupt):
                self.logger.info('service task canceled')
                break
            except:
                # an unexpected exception has occurred, log it and continue
                self.logger.exception('service task exception')
diff --git a/matteo_env/Lib/site-packages/engineio/socket.py b/matteo_env/Lib/site-packages/engineio/socket.py
new file mode 100644
index 0000000..1434b19
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/socket.py
@@ -0,0 +1,254 @@
+import sys
+import time
+
+from . import exceptions
+from . import packet
+from . import payload
+
+
class Socket(object):
    """An Engine.IO socket.

    Holds the per-client state and an outbound packet queue; a queued
    ``None`` acts as the shutdown sentinel that unblocks waiters.
    """
    upgrade_protocols = ['websocket']

    def __init__(self, server, sid):
        self.server = server
        self.sid = sid
        self.queue = self.server.create_queue()  # outbound packet queue
        self.last_ping = None  # time.time() of the last PING sent
        self.connected = False
        self.upgrading = False  # True while a websocket upgrade is in flight
        self.upgraded = False
        self.closing = False
        self.closed = False
        self.session = {}  # application session dictionary

    def poll(self):
        """Wait for packets to send to the client.

        Blocks for at most ping_interval + ping_timeout for the first
        packet, then drains any further queued packets without blocking.
        """
        queue_empty = self.server.get_queue_empty_exception()
        try:
            packets = [self.queue.get(
                timeout=self.server.ping_interval + self.server.ping_timeout)]
            self.queue.task_done()
        except queue_empty:
            raise exceptions.QueueEmpty()
        if packets == [None]:
            # shutdown sentinel: report "no packets" to the caller
            return []
        while True:
            try:
                pkt = self.queue.get(block=False)
                self.queue.task_done()
                if pkt is None:
                    # keep the sentinel queued for any other waiter
                    self.queue.put(None)
                    break
                packets.append(pkt)
            except queue_empty:
                break
        return packets

    def receive(self, pkt):
        """Receive packet from the client."""
        packet_name = packet.packet_names[pkt.packet_type] \
            if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
        self.server.logger.info('%s: Received packet %s data %s',
                                self.sid, packet_name,
                                pkt.data if not isinstance(pkt.data, bytes)
                                else '')
        if pkt.packet_type == packet.PONG:
            # client answered our ping; arm the next one
            self.schedule_ping()
        elif pkt.packet_type == packet.MESSAGE:
            self.server._trigger_event('message', self.sid, pkt.data,
                                       run_async=self.server.async_handlers)
        elif pkt.packet_type == packet.UPGRADE:
            self.send(packet.Packet(packet.NOOP))
        elif pkt.packet_type == packet.CLOSE:
            self.close(wait=False, abort=True)
        else:
            raise exceptions.UnknownPacketError()

    def check_ping_timeout(self):
        """Make sure the client is still responding to pings.

        Returns False (after initiating a close) when the client timed out;
        raises SocketIsClosedError when the socket is already closed.
        """
        if self.closed:
            raise exceptions.SocketIsClosedError()
        if self.last_ping and \
                time.time() - self.last_ping > self.server.ping_timeout:
            self.server.logger.info('%s: Client is gone, closing socket',
                                    self.sid)
            # Passing abort=False here will cause close() to write a
            # CLOSE packet. This has the effect of updating half-open sockets
            # to their correct state of disconnected
            self.close(wait=False, abort=False)
            return False
        return True

    def send(self, pkt):
        """Send a packet to the client."""
        if not self.check_ping_timeout():
            # client timed out; close() has already been initiated
            return
        else:
            self.queue.put(pkt)
        self.server.logger.info('%s: Sending packet %s data %s',
                                self.sid, packet.packet_names[pkt.packet_type],
                                pkt.data if not isinstance(pkt.data, bytes)
                                else '')

    def handle_get_request(self, environ, start_response):
        """Handle a long-polling GET request from the client."""
        connections = [
            s.strip()
            for s in environ.get('HTTP_CONNECTION', '').lower().split(',')]
        transport = environ.get('HTTP_UPGRADE', '').lower()
        if 'upgrade' in connections and transport in self.upgrade_protocols:
            self.server.logger.info('%s: Received request to upgrade to %s',
                                    self.sid, transport)
            return getattr(self, '_upgrade_' + transport)(environ,
                                                          start_response)
        if self.upgrading or self.upgraded:
            # we are upgrading to WebSocket, do not return any more packets
            # through the polling endpoint
            return [packet.Packet(packet.NOOP)]
        try:
            packets = self.poll()
        except exceptions.QueueEmpty:
            exc = sys.exc_info()
            self.close(wait=False)
            raise exc[1].with_traceback(exc[2])
        return packets

    def handle_post_request(self, environ):
        """Handle a long-polling POST request from the client."""
        length = int(environ.get('CONTENT_LENGTH', '0'))
        if length > self.server.max_http_buffer_size:
            raise exceptions.ContentTooLongError()
        else:
            body = environ['wsgi.input'].read(length).decode('utf-8')
            p = payload.Payload(encoded_payload=body)
            for pkt in p.packets:
                self.receive(pkt)

    def close(self, wait=True, abort=False):
        """Close the socket connection.

        :param wait: block until the queue is fully drained.
        :param abort: skip sending the CLOSE packet to the client.
        """
        if not self.closed and not self.closing:
            self.closing = True
            self.server._trigger_event('disconnect', self.sid, run_async=False)
            if not abort:
                # tell the client we are done before tearing down
                self.send(packet.Packet(packet.CLOSE))
            self.closed = True
            self.queue.put(None)
            if wait:
                self.queue.join()

    def schedule_ping(self):
        # arm a one-shot background task that sends the next PING after
        # the configured ping interval
        def send_ping():
            self.last_ping = None
            self.server.sleep(self.server.ping_interval)
            if not self.closing and not self.closed:
                self.last_ping = time.time()
                self.send(packet.Packet(packet.PING))

        self.server.start_background_task(send_ping)

    def _upgrade_websocket(self, environ, start_response):
        """Upgrade the connection from polling to websocket."""
        if self.upgraded:
            raise IOError('Socket has been upgraded already')
        if self.server._async['websocket'] is None:
            # the selected async mode does not support websocket
            return self.server._bad_request()
        ws = self.server._async['websocket'](self._websocket_handler)
        return ws(environ, start_response)

    def _websocket_handler(self, ws):
        """Engine.IO handler for websocket transport."""
        # try to set a socket timeout matching the configured ping interval
        # and timeout
        for attr in ['_sock', 'socket']: # pragma: no cover
            if hasattr(ws, attr) and hasattr(getattr(ws, attr), 'settimeout'):
                getattr(ws, attr).settimeout(
                    self.server.ping_interval + self.server.ping_timeout)

        if self.connected:
            # the socket was already connected, so this is an upgrade
            self.upgrading = True # hold packet sends during the upgrade

            # the client must first probe with PING 'probe'
            pkt = ws.wait()
            decoded_pkt = packet.Packet(encoded_packet=pkt)
            if decoded_pkt.packet_type != packet.PING or \
                    decoded_pkt.data != 'probe':
                self.server.logger.info(
                    '%s: Failed websocket upgrade, no PING packet', self.sid)
                self.upgrading = False
                return []
            ws.send(packet.Packet(packet.PONG, data='probe').encode())
            self.queue.put(packet.Packet(packet.NOOP)) # end poll

            # then confirm the switch with an UPGRADE packet
            pkt = ws.wait()
            decoded_pkt = packet.Packet(encoded_packet=pkt)
            if decoded_pkt.packet_type != packet.UPGRADE:
                self.upgraded = False
                self.server.logger.info(
                    ('%s: Failed websocket upgrade, expected UPGRADE packet, '
                     'received %s instead.'),
                    self.sid, pkt)
                self.upgrading = False
                return []
            self.upgraded = True
            self.upgrading = False
        else:
            # direct websocket connection, no polling phase to upgrade from
            self.connected = True
            self.upgraded = True

        # start separate writer thread
        def writer():
            while True:
                packets = None
                try:
                    packets = self.poll()
                except exceptions.QueueEmpty:
                    break
                if not packets:
                    # empty packet list returned -> connection closed
                    break
                try:
                    for pkt in packets:
                        ws.send(pkt.encode())
                except:
                    break
        writer_task = self.server.start_background_task(writer)

        self.server.logger.info(
            '%s: Upgrade to websocket successful', self.sid)

        # reader loop: deliver incoming packets until the connection drops
        while True:
            p = None
            try:
                p = ws.wait()
            except Exception as e:
                # if the socket is already closed, we can assume this is a
                # downstream error of that
                if not self.closed: # pragma: no cover
                    self.server.logger.info(
                        '%s: Unexpected error "%s", closing connection',
                        self.sid, str(e))
                break
            if p is None:
                # connection closed by client
                break
            pkt = packet.Packet(encoded_packet=p)
            try:
                self.receive(pkt)
            except exceptions.UnknownPacketError: # pragma: no cover
                pass
            except exceptions.SocketIsClosedError: # pragma: no cover
                self.server.logger.info('Receive error -- socket is closed')
                break
            except: # pragma: no cover
                # if we get an unexpected exception we log the error and exit
                # the connection properly
                self.server.logger.exception('Unknown receive error')
                break

        self.queue.put(None) # unlock the writer task so that it can exit
        writer_task.join()
        self.close(wait=False, abort=True)

        return []
diff --git a/matteo_env/Lib/site-packages/engineio/static_files.py b/matteo_env/Lib/site-packages/engineio/static_files.py
new file mode 100644
index 0000000..3058f6e
--- /dev/null
+++ b/matteo_env/Lib/site-packages/engineio/static_files.py
@@ -0,0 +1,55 @@
content_types = {
    'css': 'text/css',
    'gif': 'image/gif',
    'html': 'text/html',
    'jpg': 'image/jpeg',
    'js': 'application/javascript',
    'json': 'application/json',
    'png': 'image/png',
    'txt': 'text/plain',
}


def get_static_file(path, static_files):
    """Return the local filename and content type for the requested static
    file URL.

    :param path: the path portion of the requested URL.
    :param static_files: a static file configuration dictionary.

    This function returns a dictionary with two keys, "filename" and
    "content_type". If the requested URL does not match any static file, the
    return value is None.
    """
    if path in static_files:
        f = static_files[path]
    else:
        # walk the path upwards, matching the longest configured prefix
        f = None
        suffix = ''
        prefix = path
        while prefix != '':
            prefix, tail = prefix.rsplit('/', 1)
            suffix = '/' + tail + suffix
            if prefix in static_files:
                f = static_files[prefix] + suffix
                break
            elif prefix + '/' in static_files:
                f = static_files[prefix + '/'] + suffix[1:]
                break
    if not f:
        return f
    if isinstance(f, str):
        f = {'filename': f}
    if f['filename'].endswith('/'):
        # directory request: resolve to the configured index document
        if '' in static_files:
            root = static_files['']
            if isinstance(root, str):
                f['filename'] += root
            else:
                f['filename'] += root['filename']
                if 'content_type' in root:
                    f['content_type'] = root['content_type']
        else:
            f['filename'] += 'index.html'
    if 'content_type' not in f:
        # infer the content type from the file extension
        extension = f['filename'].rsplit('.')[-1]
        f['content_type'] = content_types.get(
            extension, 'application/octet-stream')
    return f
diff --git a/matteo_env/Lib/site-packages/flask/__init__.py b/matteo_env/Lib/site-packages/flask/__init__.py
new file mode 100644
index 0000000..1a487e1
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/__init__.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+"""
+ flask
+ ~~~~~
+
+ A microframework based on Werkzeug. It's extensively documented
+ and follows best practice patterns.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+# utilities we import from Werkzeug and Jinja2 that are unused
+# in the module but are exported as public interface.
+from jinja2 import escape
+from jinja2 import Markup
+from werkzeug.exceptions import abort
+from werkzeug.utils import redirect
+
+from . import json
+from ._compat import json_available
+from .app import Flask
+from .app import Request
+from .app import Response
+from .blueprints import Blueprint
+from .config import Config
+from .ctx import after_this_request
+from .ctx import copy_current_request_context
+from .ctx import has_app_context
+from .ctx import has_request_context
+from .globals import _app_ctx_stack
+from .globals import _request_ctx_stack
+from .globals import current_app
+from .globals import g
+from .globals import request
+from .globals import session
+from .helpers import flash
+from .helpers import get_flashed_messages
+from .helpers import get_template_attribute
+from .helpers import make_response
+from .helpers import safe_join
+from .helpers import send_file
+from .helpers import send_from_directory
+from .helpers import stream_with_context
+from .helpers import url_for
+from .json import jsonify
+from .signals import appcontext_popped
+from .signals import appcontext_pushed
+from .signals import appcontext_tearing_down
+from .signals import before_render_template
+from .signals import got_request_exception
+from .signals import message_flashed
+from .signals import request_finished
+from .signals import request_started
+from .signals import request_tearing_down
+from .signals import signals_available
+from .signals import template_rendered
+from .templating import render_template
+from .templating import render_template_string
+
+__version__ = "1.1.2"
diff --git a/matteo_env/Lib/site-packages/flask/__main__.py b/matteo_env/Lib/site-packages/flask/__main__.py
new file mode 100644
index 0000000..f61dbc0
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/__main__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+"""
+ flask.__main__
+ ~~~~~~~~~~~~~~
+
+ Alias for flask.run for the command line.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+
+if __name__ == "__main__":
+ from .cli import main
+
+ main(as_module=True)
diff --git a/matteo_env/Lib/site-packages/flask/_compat.py b/matteo_env/Lib/site-packages/flask/_compat.py
new file mode 100644
index 0000000..76c442c
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/_compat.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+"""
+ flask._compat
+ ~~~~~~~~~~~~~
+
+ Some py2/py3 compatibility support based on a stripped down
+ version of six so we don't have to depend on a specific version
+ of it.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+import sys
+
PY2 = sys.version_info[0] == 2
_identity = lambda x: x

try: # Python 2
    # These names exist only as builtins on Python 2; referencing them
    # raises NameError on Python 3, handled by the except clause below.
    text_type = unicode
    string_types = (str, unicode)
    integer_types = (int, long)
except NameError: # Python 3
    text_type = str
    string_types = (str,)
    integer_types = (int,)

if not PY2:
    # Python 3: dict views are already lazy, just wrap them in iter()
    iterkeys = lambda d: iter(d.keys())
    itervalues = lambda d: iter(d.values())
    iteritems = lambda d: iter(d.items())

    from inspect import getfullargspec as getargspec
    from io import StringIO
    import collections.abc as collections_abc

    def reraise(tp, value, tb=None):
        # re-raise ``value`` with traceback ``tb`` using PY3-only syntax
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

    implements_to_string = _identity

else:
    iterkeys = lambda d: d.iterkeys()
    itervalues = lambda d: d.itervalues()
    iteritems = lambda d: d.iteritems()

    from inspect import getargspec
    from cStringIO import StringIO
    import collections as collections_abc

    # ``raise tp, value, tb`` is a syntax error on PY3, so it must be exec'd
    exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")

    def implements_to_string(cls):
        # class decorator: route __str__ through __unicode__ and return
        # UTF-8 bytes from __str__, matching PY2 expectations
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda x: x.__unicode__().encode("utf-8")
        return cls
+
+
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a
    # dummy metaclass for one level of class instantiation that replaces
    # itself with the actual metaclass.
    class metaclass(type):
        def __new__(metacls, name, this_bases, d):
            # ignore ``this_bases`` (the temporary class); build the real
            # class with the caller-supplied metaclass and bases instead
            return meta(name, bases, d)

    return type.__new__(metaclass, "temporary_class", (), {})
+
+
+# Certain versions of pypy have a bug where clearing the exception stack
+# breaks the __exit__ function in a very peculiar way. The second level of
+# exception blocks is necessary because pypy seems to forget to check if an
+# exception happened until the next bytecode instruction?
+#
+# Relevant PyPy bugfix commit:
+# https://bitbucket.org/pypy/pypy/commits/77ecf91c635a287e88e60d8ddb0f4e9df4003301
+# According to ronan on #pypy IRC, it is released in PyPy2 2.3 and later
+# versions.
+#
+# Ubuntu 14.04 has PyPy 2.2.1, which does exhibit this bug.
BROKEN_PYPY_CTXMGR_EXIT = False
if hasattr(sys, "pypy_version_info"):

    class _Mgr(object):
        # minimal context manager used only to probe for the pypy bug
        def __enter__(self):
            return self

        def __exit__(self, *args):
            if hasattr(sys, "exc_clear"):
                # Python 3 (PyPy3) doesn't have exc_clear
                sys.exc_clear()

    try:
        try:
            with _Mgr():
                raise AssertionError()
        except: # noqa: B001
            # We intentionally use a bare except here. See the comment above
            # regarding a pypy bug as to why.
            raise
    except TypeError:
        # buggy pypy: __exit__ corrupted the in-flight exception state
        BROKEN_PYPY_CTXMGR_EXIT = True
    except AssertionError:
        pass
+
+
try:
    from os import fspath
except ImportError:
    # Backwards compatibility as proposed in PEP 0519:
    # https://www.python.org/dev/peps/pep-0519/#backwards-compatibility
    def fspath(path):
        # fall back to the object's __fspath__ protocol when available
        return path.__fspath__() if hasattr(path, "__fspath__") else path
+
+
class _DeprecatedBool(object):
    # Boolean stand-in that emits a DeprecationWarning whenever it is
    # compared or truth-tested; used for ``flask.json_available`` below.

    def __init__(self, name, version, value):
        self.message = "'{}' is deprecated and will be removed in version {}.".format(
            name, version
        )
        self.value = value

    def _warn(self):
        import warnings

        warnings.warn(self.message, DeprecationWarning, stacklevel=2)

    def __eq__(self, other):
        self._warn()
        return other == self.value

    def __ne__(self, other):
        self._warn()
        return other != self.value

    def __bool__(self):
        self._warn()
        return self.value

    __nonzero__ = __bool__  # Python 2 truth protocol


json_available = _DeprecatedBool("flask.json_available", "2.0.0", True)
diff --git a/matteo_env/Lib/site-packages/flask/app.py b/matteo_env/Lib/site-packages/flask/app.py
new file mode 100644
index 0000000..e385899
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/app.py
@@ -0,0 +1,2467 @@
+# -*- coding: utf-8 -*-
+"""
+ flask.app
+ ~~~~~~~~~
+
+ This module implements the central WSGI application object.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+import os
+import sys
+import warnings
+from datetime import timedelta
+from functools import update_wrapper
+from itertools import chain
+from threading import Lock
+
+from werkzeug.datastructures import Headers
+from werkzeug.datastructures import ImmutableDict
+from werkzeug.exceptions import BadRequest
+from werkzeug.exceptions import BadRequestKeyError
+from werkzeug.exceptions import default_exceptions
+from werkzeug.exceptions import HTTPException
+from werkzeug.exceptions import InternalServerError
+from werkzeug.exceptions import MethodNotAllowed
+from werkzeug.routing import BuildError
+from werkzeug.routing import Map
+from werkzeug.routing import RequestRedirect
+from werkzeug.routing import RoutingException
+from werkzeug.routing import Rule
+from werkzeug.wrappers import BaseResponse
+
+from . import cli
+from . import json
+from ._compat import integer_types
+from ._compat import reraise
+from ._compat import string_types
+from ._compat import text_type
+from .config import Config
+from .config import ConfigAttribute
+from .ctx import _AppCtxGlobals
+from .ctx import AppContext
+from .ctx import RequestContext
+from .globals import _request_ctx_stack
+from .globals import g
+from .globals import request
+from .globals import session
+from .helpers import _endpoint_from_view_func
+from .helpers import _PackageBoundObject
+from .helpers import find_package
+from .helpers import get_debug_flag
+from .helpers import get_env
+from .helpers import get_flashed_messages
+from .helpers import get_load_dotenv
+from .helpers import locked_cached_property
+from .helpers import url_for
+from .json import jsonify
+from .logging import create_logger
+from .sessions import SecureCookieSessionInterface
+from .signals import appcontext_tearing_down
+from .signals import got_request_exception
+from .signals import request_finished
+from .signals import request_started
+from .signals import request_tearing_down
+from .templating import _default_template_ctx_processor
+from .templating import DispatchingJinjaLoader
+from .templating import Environment
+from .wrappers import Request
+from .wrappers import Response
+
+# a singleton sentinel value for parameter defaults
+_sentinel = object()
+
+
+def _make_timedelta(value):
+ if not isinstance(value, timedelta):
+ return timedelta(seconds=value)
+ return value
+
+
+def setupmethod(f):
+ """Wraps a method so that it performs a check in debug mode if the
+ first request was already handled.
+ """
+
+ def wrapper_func(self, *args, **kwargs):
+ if self.debug and self._got_first_request:
+ raise AssertionError(
+ "A setup function was called after the "
+ "first request was handled. This usually indicates a bug "
+ "in the application where a module was not imported "
+ "and decorators or other functionality was called too late.\n"
+ "To fix this make sure to import all your view modules, "
+ "database models and everything related at a central place "
+ "before the application starts serving requests."
+ )
+ return f(self, *args, **kwargs)
+
+ return update_wrapper(wrapper_func, f)
+
+
+class Flask(_PackageBoundObject):
+ """The flask object implements a WSGI application and acts as the central
+ object. It is passed the name of the module or package of the
+ application. Once it is created it will act as a central registry for
+ the view functions, the URL rules, template configuration and much more.
+
+ The name of the package is used to resolve resources from inside the
+ package or the folder the module is contained in depending on if the
+ package parameter resolves to an actual python package (a folder with
+ an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file).
+
+ For more information about resource loading, see :func:`open_resource`.
+
+ Usually you create a :class:`Flask` instance in your main module or
+ in the :file:`__init__.py` file of your package like this::
+
+ from flask import Flask
+ app = Flask(__name__)
+
+ .. admonition:: About the First Parameter
+
+ The idea of the first parameter is to give Flask an idea of what
+ belongs to your application. This name is used to find resources
+ on the filesystem, can be used by extensions to improve debugging
+ information and a lot more.
+
+ So it's important what you provide there. If you are using a single
+ module, `__name__` is always the correct value. If you however are
+ using a package, it's usually recommended to hardcode the name of
+ your package there.
+
+ For example if your application is defined in :file:`yourapplication/app.py`
+ you should create it with one of the two versions below::
+
+ app = Flask('yourapplication')
+ app = Flask(__name__.split('.')[0])
+
+ Why is that? The application will work even with `__name__`, thanks
+ to how resources are looked up. However it will make debugging more
+ painful. Certain extensions can make assumptions based on the
+ import name of your application. For example the Flask-SQLAlchemy
+ extension will look for the code in your application that triggered
+ an SQL query in debug mode. If the import name is not properly set
+ up, that debugging information is lost. (For example it would only
+ pick up SQL queries in `yourapplication.app` and not
+ `yourapplication.views.frontend`)
+
+ .. versionadded:: 0.7
+ The `static_url_path`, `static_folder`, and `template_folder`
+ parameters were added.
+
+ .. versionadded:: 0.8
+ The `instance_path` and `instance_relative_config` parameters were
+ added.
+
+ .. versionadded:: 0.11
+ The `root_path` parameter was added.
+
+ .. versionadded:: 1.0
+ The ``host_matching`` and ``static_host`` parameters were added.
+
+ .. versionadded:: 1.0
+ The ``subdomain_matching`` parameter was added. Subdomain
+ matching needs to be enabled manually now. Setting
+ :data:`SERVER_NAME` does not implicitly enable it.
+
+ :param import_name: the name of the application package
+ :param static_url_path: can be used to specify a different path for the
+ static files on the web. Defaults to the name
+ of the `static_folder` folder.
+ :param static_folder: The folder with static files that is served at
+ ``static_url_path``. Relative to the application ``root_path``
+ or an absolute path. Defaults to ``'static'``.
+ :param static_host: the host to use when adding the static route.
+ Defaults to None. Required when using ``host_matching=True``
+ with a ``static_folder`` configured.
+ :param host_matching: set ``url_map.host_matching`` attribute.
+ Defaults to False.
+ :param subdomain_matching: consider the subdomain relative to
+ :data:`SERVER_NAME` when matching routes. Defaults to False.
+ :param template_folder: the folder that contains the templates that should
+ be used by the application. Defaults to
+ ``'templates'`` folder in the root path of the
+ application.
+ :param instance_path: An alternative instance path for the application.
+ By default the folder ``'instance'`` next to the
+ package or module is assumed to be the instance
+ path.
+ :param instance_relative_config: if set to ``True`` relative filenames
+ for loading the config are assumed to
+ be relative to the instance path instead
+ of the application root.
+ :param root_path: Flask by default will automatically calculate the path
+ to the root of the application. In certain situations
+ this cannot be achieved (for instance if the package
+ is a Python 3 namespace package) and needs to be
+ manually defined.
+ """
+
+ #: The class that is used for request objects. See :class:`~flask.Request`
+ #: for more information.
+ request_class = Request
+
+ #: The class that is used for response objects. See
+ #: :class:`~flask.Response` for more information.
+ response_class = Response
+
+ #: The class that is used for the Jinja environment.
+ #:
+ #: .. versionadded:: 0.11
+ jinja_environment = Environment
+
+ #: The class that is used for the :data:`~flask.g` instance.
+ #:
+ #: Example use cases for a custom class:
+ #:
+ #: 1. Store arbitrary attributes on flask.g.
+ #: 2. Add a property for lazy per-request database connectors.
+ #: 3. Return None instead of AttributeError on unexpected attributes.
+ #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g.
+ #:
+ #: In Flask 0.9 this property was called `request_globals_class` but it
+ #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the
+ #: flask.g object is now application context scoped.
+ #:
+ #: .. versionadded:: 0.10
+ app_ctx_globals_class = _AppCtxGlobals
+
+ #: The class that is used for the ``config`` attribute of this app.
+ #: Defaults to :class:`~flask.Config`.
+ #:
+ #: Example use cases for a custom class:
+ #:
+ #: 1. Default values for certain config options.
+ #: 2. Access to config values through attributes in addition to keys.
+ #:
+ #: .. versionadded:: 0.11
+ config_class = Config
+
+ #: The testing flag. Set this to ``True`` to enable the test mode of
+ #: Flask extensions (and in the future probably also Flask itself).
+ #: For example this might activate test helpers that have an
+ #: additional runtime cost which should not be enabled by default.
+ #:
+ #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the
+ #: default it's implicitly enabled.
+ #:
+ #: This attribute can also be configured from the config with the
+ #: ``TESTING`` configuration key. Defaults to ``False``.
+ testing = ConfigAttribute("TESTING")
+
+ #: If a secret key is set, cryptographic components can use this to
+ #: sign cookies and other things. Set this to a complex random value
+ #: when you want to use the secure cookie for instance.
+ #:
+ #: This attribute can also be configured from the config with the
+ #: :data:`SECRET_KEY` configuration key. Defaults to ``None``.
+ secret_key = ConfigAttribute("SECRET_KEY")
+
+ #: The secure cookie uses this for the name of the session cookie.
+ #:
+ #: This attribute can also be configured from the config with the
+ #: ``SESSION_COOKIE_NAME`` configuration key. Defaults to ``'session'``
+ session_cookie_name = ConfigAttribute("SESSION_COOKIE_NAME")
+
+ #: A :class:`~datetime.timedelta` which is used to set the expiration
+ #: date of a permanent session. The default is 31 days which makes a
+ #: permanent session survive for roughly one month.
+ #:
+ #: This attribute can also be configured from the config with the
+ #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to
+ #: ``timedelta(days=31)``
+ permanent_session_lifetime = ConfigAttribute(
+ "PERMANENT_SESSION_LIFETIME", get_converter=_make_timedelta
+ )
+
+ #: A :class:`~datetime.timedelta` which is used as default cache_timeout
+ #: for the :func:`send_file` functions. The default is 12 hours.
+ #:
+ #: This attribute can also be configured from the config with the
+ #: ``SEND_FILE_MAX_AGE_DEFAULT`` configuration key. This configuration
+ #: variable can also be set with an integer value used as seconds.
+ #: Defaults to ``timedelta(hours=12)``
+ send_file_max_age_default = ConfigAttribute(
+ "SEND_FILE_MAX_AGE_DEFAULT", get_converter=_make_timedelta
+ )
+
+ #: Enable this if you want to use the X-Sendfile feature. Keep in
+ #: mind that the server has to support this. This only affects files
+ #: sent with the :func:`send_file` method.
+ #:
+ #: .. versionadded:: 0.2
+ #:
+ #: This attribute can also be configured from the config with the
+ #: ``USE_X_SENDFILE`` configuration key. Defaults to ``False``.
+ use_x_sendfile = ConfigAttribute("USE_X_SENDFILE")
+
+ #: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`.
+ #:
+ #: .. versionadded:: 0.10
+ json_encoder = json.JSONEncoder
+
+ #: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`.
+ #:
+ #: .. versionadded:: 0.10
+ json_decoder = json.JSONDecoder
+
+ #: Options that are passed to the Jinja environment in
+ #: :meth:`create_jinja_environment`. Changing these options after
+ #: the environment is created (accessing :attr:`jinja_env`) will
+ #: have no effect.
+ #:
+ #: .. versionchanged:: 1.1.0
+ #: This is a ``dict`` instead of an ``ImmutableDict`` to allow
+ #: easier configuration.
+ #:
+ jinja_options = {"extensions": ["jinja2.ext.autoescape", "jinja2.ext.with_"]}
+
+ #: Default configuration parameters.
+ default_config = ImmutableDict(
+ {
+ "ENV": None,
+ "DEBUG": None,
+ "TESTING": False,
+ "PROPAGATE_EXCEPTIONS": None,
+ "PRESERVE_CONTEXT_ON_EXCEPTION": None,
+ "SECRET_KEY": None,
+ "PERMANENT_SESSION_LIFETIME": timedelta(days=31),
+ "USE_X_SENDFILE": False,
+ "SERVER_NAME": None,
+ "APPLICATION_ROOT": "/",
+ "SESSION_COOKIE_NAME": "session",
+ "SESSION_COOKIE_DOMAIN": None,
+ "SESSION_COOKIE_PATH": None,
+ "SESSION_COOKIE_HTTPONLY": True,
+ "SESSION_COOKIE_SECURE": False,
+ "SESSION_COOKIE_SAMESITE": None,
+ "SESSION_REFRESH_EACH_REQUEST": True,
+ "MAX_CONTENT_LENGTH": None,
+ "SEND_FILE_MAX_AGE_DEFAULT": timedelta(hours=12),
+ "TRAP_BAD_REQUEST_ERRORS": None,
+ "TRAP_HTTP_EXCEPTIONS": False,
+ "EXPLAIN_TEMPLATE_LOADING": False,
+ "PREFERRED_URL_SCHEME": "http",
+ "JSON_AS_ASCII": True,
+ "JSON_SORT_KEYS": True,
+ "JSONIFY_PRETTYPRINT_REGULAR": False,
+ "JSONIFY_MIMETYPE": "application/json",
+ "TEMPLATES_AUTO_RELOAD": None,
+ "MAX_COOKIE_SIZE": 4093,
+ }
+ )
+
+ #: The rule object to use for URL rules created. This is used by
+ #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`.
+ #:
+ #: .. versionadded:: 0.7
+ url_rule_class = Rule
+
+ #: The map object to use for storing the URL rules and routing
+ #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`.
+ #:
+ #: .. versionadded:: 1.1.0
+ url_map_class = Map
+
+ #: the test client that is used with when `test_client` is used.
+ #:
+ #: .. versionadded:: 0.7
+ test_client_class = None
+
+ #: The :class:`~click.testing.CliRunner` subclass, by default
+ #: :class:`~flask.testing.FlaskCliRunner` that is used by
+ #: :meth:`test_cli_runner`. Its ``__init__`` method should take a
+ #: Flask app object as the first argument.
+ #:
+ #: .. versionadded:: 1.0
+ test_cli_runner_class = None
+
+ #: the session interface to use. By default an instance of
+ #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here.
+ #:
+ #: .. versionadded:: 0.8
+ session_interface = SecureCookieSessionInterface()
+
+ # TODO remove the next three attrs when Sphinx :inherited-members: works
+ # https://github.com/sphinx-doc/sphinx/issues/741
+
+ #: The name of the package or module that this app belongs to. Do not
+ #: change this once it is set by the constructor.
+ import_name = None
+
+ #: Location of the template files to be added to the template lookup.
+ #: ``None`` if templates should not be added.
+ template_folder = None
+
+ #: Absolute path to the package on the filesystem. Used to look up
+ #: resources contained in the package.
+ root_path = None
+
+ def __init__(
+ self,
+ import_name,
+ static_url_path=None,
+ static_folder="static",
+ static_host=None,
+ host_matching=False,
+ subdomain_matching=False,
+ template_folder="templates",
+ instance_path=None,
+ instance_relative_config=False,
+ root_path=None,
+ ):
+ _PackageBoundObject.__init__(
+ self, import_name, template_folder=template_folder, root_path=root_path
+ )
+
+ self.static_url_path = static_url_path
+ self.static_folder = static_folder
+
+ if instance_path is None:
+ instance_path = self.auto_find_instance_path()
+ elif not os.path.isabs(instance_path):
+ raise ValueError(
+ "If an instance path is provided it must be absolute."
+ " A relative path was given instead."
+ )
+
+ #: Holds the path to the instance folder.
+ #:
+ #: .. versionadded:: 0.8
+ self.instance_path = instance_path
+
+ #: The configuration dictionary as :class:`Config`. This behaves
+ #: exactly like a regular dictionary but supports additional methods
+ #: to load a config from files.
+ self.config = self.make_config(instance_relative_config)
+
+ #: A dictionary of all view functions registered. The keys will
+ #: be function names which are also used to generate URLs and
+ #: the values are the function objects themselves.
+ #: To register a view function, use the :meth:`route` decorator.
+ self.view_functions = {}
+
+ #: A dictionary of all registered error handlers. The key is ``None``
+ #: for error handlers active on the application, otherwise the key is
+ #: the name of the blueprint. Each key points to another dictionary
+ #: where the key is the status code of the http exception. The
+ #: special key ``None`` points to a list of tuples where the first item
+ #: is the class for the instance check and the second the error handler
+ #: function.
+ #:
+ #: To register an error handler, use the :meth:`errorhandler`
+ #: decorator.
+ self.error_handler_spec = {}
+
+ #: A list of functions that are called when :meth:`url_for` raises a
+ #: :exc:`~werkzeug.routing.BuildError`. Each function registered here
+ #: is called with `error`, `endpoint` and `values`. If a function
+ #: returns ``None`` or raises a :exc:`BuildError` the next function is
+ #: tried.
+ #:
+ #: .. versionadded:: 0.9
+ self.url_build_error_handlers = []
+
+ #: A dictionary with lists of functions that will be called at the
+ #: beginning of each request. The key of the dictionary is the name of
+ #: the blueprint this function is active for, or ``None`` for all
+ #: requests. To register a function, use the :meth:`before_request`
+ #: decorator.
+ self.before_request_funcs = {}
+
+ #: A list of functions that will be called at the beginning of the
+ #: first request to this instance. To register a function, use the
+ #: :meth:`before_first_request` decorator.
+ #:
+ #: .. versionadded:: 0.8
+ self.before_first_request_funcs = []
+
+ #: A dictionary with lists of functions that should be called after
+ #: each request. The key of the dictionary is the name of the blueprint
+ #: this function is active for, ``None`` for all requests. This can for
+ #: example be used to close database connections. To register a function
+ #: here, use the :meth:`after_request` decorator.
+ self.after_request_funcs = {}
+
+ #: A dictionary with lists of functions that are called after
+ #: each request, even if an exception has occurred. The key of the
+ #: dictionary is the name of the blueprint this function is active for,
+ #: ``None`` for all requests. These functions are not allowed to modify
+ #: the request, and their return values are ignored. If an exception
+ #: occurred while processing the request, it gets passed to each
+ #: teardown_request function. To register a function here, use the
+ #: :meth:`teardown_request` decorator.
+ #:
+ #: .. versionadded:: 0.7
+ self.teardown_request_funcs = {}
+
+ #: A list of functions that are called when the application context
+ #: is destroyed. Since the application context is also torn down
+ #: if the request ends this is the place to store code that disconnects
+ #: from databases.
+ #:
+ #: .. versionadded:: 0.9
+ self.teardown_appcontext_funcs = []
+
+ #: A dictionary with lists of functions that are called before the
+ #: :attr:`before_request_funcs` functions. The key of the dictionary is
+ #: the name of the blueprint this function is active for, or ``None``
+ #: for all requests. To register a function, use
+ #: :meth:`url_value_preprocessor`.
+ #:
+ #: .. versionadded:: 0.7
+ self.url_value_preprocessors = {}
+
+ #: A dictionary with lists of functions that can be used as URL value
+ #: preprocessors. The key ``None`` here is used for application wide
+ #: callbacks, otherwise the key is the name of the blueprint.
+ #: Each of these functions has the chance to modify the dictionary
+ #: of URL values before they are used as the keyword arguments of the
+ #: view function. For each function registered this one should also
+ #: provide a :meth:`url_defaults` function that adds the parameters
+ #: automatically again that were removed that way.
+ #:
+ #: .. versionadded:: 0.7
+ self.url_default_functions = {}
+
+ #: A dictionary with list of functions that are called without argument
+ #: to populate the template context. The key of the dictionary is the
+ #: name of the blueprint this function is active for, ``None`` for all
+ #: requests. Each returns a dictionary that the template context is
+ #: updated with. To register a function here, use the
+ #: :meth:`context_processor` decorator.
+ self.template_context_processors = {None: [_default_template_ctx_processor]}
+
+ #: A list of shell context processor functions that should be run
+ #: when a shell context is created.
+ #:
+ #: .. versionadded:: 0.11
+ self.shell_context_processors = []
+
+ #: all the attached blueprints in a dictionary by name. Blueprints
+ #: can be attached multiple times so this dictionary does not tell
+ #: you how often they got attached.
+ #:
+ #: .. versionadded:: 0.7
+ self.blueprints = {}
+ self._blueprint_order = []
+
+ #: a place where extensions can store application specific state. For
+ #: example this is where an extension could store database engines and
+ #: similar things. For backwards compatibility extensions should register
+ #: themselves like this::
+ #:
+ #: if not hasattr(app, 'extensions'):
+ #: app.extensions = {}
+ #: app.extensions['extensionname'] = SomeObject()
+ #:
+ #: The key must match the name of the extension module. For example in
+ #: case of a "Flask-Foo" extension in `flask_foo`, the key would be
+ #: ``'foo'``.
+ #:
+ #: .. versionadded:: 0.7
+ self.extensions = {}
+
+ #: The :class:`~werkzeug.routing.Map` for this instance. You can use
+ #: this to change the routing converters after the class was created
+ #: but before any routes are connected. Example::
+ #:
+ #: from werkzeug.routing import BaseConverter
+ #:
+ #: class ListConverter(BaseConverter):
+ #: def to_python(self, value):
+ #: return value.split(',')
+ #: def to_url(self, values):
+ #: return ','.join(super(ListConverter, self).to_url(value)
+ #: for value in values)
+ #:
+ #: app = Flask(__name__)
+ #: app.url_map.converters['list'] = ListConverter
+ self.url_map = self.url_map_class()
+
+ self.url_map.host_matching = host_matching
+ self.subdomain_matching = subdomain_matching
+
+ # tracks internally if the application already handled at least one
+ # request.
+ self._got_first_request = False
+ self._before_request_lock = Lock()
+
+ # Add a static route using the provided static_url_path, static_host,
+ # and static_folder if there is a configured static_folder.
+ # Note we do this without checking if static_folder exists.
+ # For one, it might be created while the server is running (e.g. during
+ # development). Also, Google App Engine stores static files somewhere
+ if self.has_static_folder:
+ assert (
+ bool(static_host) == host_matching
+ ), "Invalid static_host/host_matching combination"
+ self.add_url_rule(
+ self.static_url_path + "/",
+ endpoint="static",
+ host=static_host,
+ view_func=self.send_static_file,
+ )
+
+ # Set the name of the Click group in case someone wants to add
+ # the app's commands to another CLI tool.
+ self.cli.name = self.name
+
+ @locked_cached_property
+ def name(self):
+ """The name of the application. This is usually the import name
+ with the difference that it's guessed from the run file if the
+ import name is main. This name is used as a display name when
+ Flask needs the name of the application. It can be set and overridden
+ to change the value.
+
+ .. versionadded:: 0.8
+ """
+ if self.import_name == "__main__":
+ fn = getattr(sys.modules["__main__"], "__file__", None)
+ if fn is None:
+ return "__main__"
+ return os.path.splitext(os.path.basename(fn))[0]
+ return self.import_name
+
+ @property
+ def propagate_exceptions(self):
+ """Returns the value of the ``PROPAGATE_EXCEPTIONS`` configuration
+ value in case it's set, otherwise a sensible default is returned.
+
+ .. versionadded:: 0.7
+ """
+ rv = self.config["PROPAGATE_EXCEPTIONS"]
+ if rv is not None:
+ return rv
+ return self.testing or self.debug
+
+ @property
+ def preserve_context_on_exception(self):
+ """Returns the value of the ``PRESERVE_CONTEXT_ON_EXCEPTION``
+ configuration value in case it's set, otherwise a sensible default
+ is returned.
+
+ .. versionadded:: 0.7
+ """
+ rv = self.config["PRESERVE_CONTEXT_ON_EXCEPTION"]
+ if rv is not None:
+ return rv
+ return self.debug
+
+ @locked_cached_property
+ def logger(self):
+ """A standard Python :class:`~logging.Logger` for the app, with
+ the same name as :attr:`name`.
+
+ In debug mode, the logger's :attr:`~logging.Logger.level` will
+ be set to :data:`~logging.DEBUG`.
+
+ If there are no handlers configured, a default handler will be
+ added. See :doc:`/logging` for more information.
+
+ .. versionchanged:: 1.1.0
+ The logger takes the same name as :attr:`name` rather than
+ hard-coding ``"flask.app"``.
+
+ .. versionchanged:: 1.0.0
+ Behavior was simplified. The logger is always named
+ ``"flask.app"``. The level is only set during configuration,
+ it doesn't check ``app.debug`` each time. Only one format is
+ used, not different ones depending on ``app.debug``. No
+ handlers are removed, and a handler is only added if no
+ handlers are already configured.
+
+ .. versionadded:: 0.3
+ """
+ return create_logger(self)
+
+ @locked_cached_property
+ def jinja_env(self):
+ """The Jinja environment used to load templates.
+
+ The environment is created the first time this property is
+ accessed. Changing :attr:`jinja_options` after that will have no
+ effect.
+ """
+ return self.create_jinja_environment()
+
+ @property
+ def got_first_request(self):
+ """This attribute is set to ``True`` if the application started
+ handling the first request.
+
+ .. versionadded:: 0.8
+ """
+ return self._got_first_request
+
+ def make_config(self, instance_relative=False):
+ """Used to create the config attribute by the Flask constructor.
+ The `instance_relative` parameter is passed in from the constructor
+ of Flask (there named `instance_relative_config`) and indicates if
+ the config should be relative to the instance path or the root path
+ of the application.
+
+ .. versionadded:: 0.8
+ """
+ root_path = self.root_path
+ if instance_relative:
+ root_path = self.instance_path
+ defaults = dict(self.default_config)
+ defaults["ENV"] = get_env()
+ defaults["DEBUG"] = get_debug_flag()
+ return self.config_class(root_path, defaults)
+
+ def auto_find_instance_path(self):
+ """Tries to locate the instance path if it was not provided to the
+ constructor of the application class. It will basically calculate
+ the path to a folder named ``instance`` next to your main file or
+ the package.
+
+ .. versionadded:: 0.8
+ """
+ prefix, package_path = find_package(self.import_name)
+ if prefix is None:
+ return os.path.join(package_path, "instance")
+ return os.path.join(prefix, "var", self.name + "-instance")
+
+ def open_instance_resource(self, resource, mode="rb"):
+ """Opens a resource from the application's instance folder
+ (:attr:`instance_path`). Otherwise works like
+ :meth:`open_resource`. Instance resources can also be opened for
+ writing.
+
+ :param resource: the name of the resource. To access resources within
+ subfolders use forward slashes as separator.
+ :param mode: resource file opening mode, default is 'rb'.
+ """
+ return open(os.path.join(self.instance_path, resource), mode)
+
+ @property
+ def templates_auto_reload(self):
+ """Reload templates when they are changed. Used by
+ :meth:`create_jinja_environment`.
+
+ This attribute can be configured with :data:`TEMPLATES_AUTO_RELOAD`. If
+ not set, it will be enabled in debug mode.
+
+ .. versionadded:: 1.0
+ This property was added but the underlying config and behavior
+ already existed.
+ """
+ rv = self.config["TEMPLATES_AUTO_RELOAD"]
+ return rv if rv is not None else self.debug
+
+ @templates_auto_reload.setter
+ def templates_auto_reload(self, value):
+ self.config["TEMPLATES_AUTO_RELOAD"] = value
+
+ def create_jinja_environment(self):
+ """Create the Jinja environment based on :attr:`jinja_options`
+ and the various Jinja-related methods of the app. Changing
+ :attr:`jinja_options` after this will have no effect. Also adds
+ Flask-related globals and filters to the environment.
+
+ .. versionchanged:: 0.11
+ ``Environment.auto_reload`` set in accordance with
+ ``TEMPLATES_AUTO_RELOAD`` configuration option.
+
+ .. versionadded:: 0.5
+ """
+ options = dict(self.jinja_options)
+
+ if "autoescape" not in options:
+ options["autoescape"] = self.select_jinja_autoescape
+
+ if "auto_reload" not in options:
+ options["auto_reload"] = self.templates_auto_reload
+
+ rv = self.jinja_environment(self, **options)
+ rv.globals.update(
+ url_for=url_for,
+ get_flashed_messages=get_flashed_messages,
+ config=self.config,
+ # request, session and g are normally added with the
+ # context processor for efficiency reasons but for imported
+ # templates we also want the proxies in there.
+ request=request,
+ session=session,
+ g=g,
+ )
+ rv.filters["tojson"] = json.tojson_filter
+ return rv
+
+ def create_global_jinja_loader(self):
+ """Creates the loader for the Jinja2 environment. Can be used to
+ override just the loader and keeping the rest unchanged. It's
+ discouraged to override this function. Instead one should override
+ the :meth:`jinja_loader` function instead.
+
+ The global loader dispatches between the loaders of the application
+ and the individual blueprints.
+
+ .. versionadded:: 0.7
+ """
+ return DispatchingJinjaLoader(self)
+
+ def select_jinja_autoescape(self, filename):
+ """Returns ``True`` if autoescaping should be active for the given
+ template name. If no template name is given, returns `True`.
+
+ .. versionadded:: 0.5
+ """
+ if filename is None:
+ return True
+ return filename.endswith((".html", ".htm", ".xml", ".xhtml"))
+
+ def update_template_context(self, context):
+ """Update the template context with some commonly used variables.
+ This injects request, session, config and g into the template
+ context as well as everything template context processors want
+ to inject. Note that the as of Flask 0.6, the original values
+ in the context will not be overridden if a context processor
+ decides to return a value with the same key.
+
+ :param context: the context as a dictionary that is updated in place
+ to add extra variables.
+ """
+ funcs = self.template_context_processors[None]
+ reqctx = _request_ctx_stack.top
+ if reqctx is not None:
+ bp = reqctx.request.blueprint
+ if bp is not None and bp in self.template_context_processors:
+ funcs = chain(funcs, self.template_context_processors[bp])
+ orig_ctx = context.copy()
+ for func in funcs:
+ context.update(func())
+ # make sure the original values win. This makes it possible to
+ # easier add new variables in context processors without breaking
+ # existing views.
+ context.update(orig_ctx)
+
+ def make_shell_context(self):
+ """Returns the shell context for an interactive shell for this
+ application. This runs all the registered shell context
+ processors.
+
+ .. versionadded:: 0.11
+ """
+ rv = {"app": self, "g": g}
+ for processor in self.shell_context_processors:
+ rv.update(processor())
+ return rv
+
+ #: What environment the app is running in. Flask and extensions may
+ #: enable behaviors based on the environment, such as enabling debug
+ #: mode. This maps to the :data:`ENV` config key. This is set by the
+ #: :envvar:`FLASK_ENV` environment variable and may not behave as
+ #: expected if set in code.
+ #:
+ #: **Do not enable development when deploying in production.**
+ #:
+ #: Default: ``'production'``
+ env = ConfigAttribute("ENV")
+
@property
def debug(self):
    """Whether debug mode is enabled. When using ``flask run`` to start
    the development server, an interactive debugger will be shown for
    unhandled exceptions, and the server will be reloaded when code
    changes. This maps to the :data:`DEBUG` config key. This is
    enabled when :attr:`env` is ``'development'`` and is overridden
    by the ``FLASK_DEBUG`` environment variable. It may not behave as
    expected if set in code.

    **Do not enable debug mode when deploying in production.**

    Default: ``True`` if :attr:`env` is ``'development'``, or
    ``False`` otherwise.
    """
    # plain view onto the DEBUG config key; the setter adds side effects
    return self.config["DEBUG"]
+
@debug.setter
def debug(self, value):
    self.config["DEBUG"] = value
    # Changing DEBUG may change the effective templates_auto_reload
    # value, so push the (possibly new) setting into the Jinja env.
    self.jinja_env.auto_reload = self.templates_auto_reload
+
def run(self, host=None, port=None, debug=None, load_dotenv=True, **options):
    """Runs the application on a local development server.

    Do not use ``run()`` in a production setting. It is not intended to
    meet security and performance requirements for a production server.
    Instead, see :ref:`deployment` for WSGI server recommendations.

    If the :attr:`debug` flag is set the server will automatically reload
    for code changes and show a debugger in case an exception happened.

    If you want to run the application in debug mode, but disable the
    code execution on the interactive debugger, you can pass
    ``use_evalex=False`` as parameter. This will keep the debugger's
    traceback screen active, but disable code execution.

    It is not recommended to use this function for development with
    automatic reloading as this is badly supported. Instead you should
    be using the :command:`flask` command line script's ``run`` support.

    .. admonition:: Keep in Mind

       Flask will suppress any server error with a generic error page
       unless it is in debug mode. As such to enable just the
       interactive debugger without the code reloading, you have to
       invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``.
       Setting ``use_debugger`` to ``True`` without being in debug mode
       won't catch any exceptions because there won't be any to
       catch.

    :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to
        have the server available externally as well. Defaults to
        ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable
        if present.
    :param port: the port of the webserver. Defaults to ``5000`` or the
        port defined in the ``SERVER_NAME`` config variable if present.
    :param debug: if given, enable or disable debug mode. See
        :attr:`debug`.
    :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv`
        files to set environment variables. Will also change the working
        directory to the directory containing the first file found.
    :param options: the options to be forwarded to the underlying Werkzeug
        server. See :func:`werkzeug.serving.run_simple` for more
        information.

    .. versionchanged:: 1.0
        If installed, python-dotenv will be used to load environment
        variables from :file:`.env` and :file:`.flaskenv` files.

        If set, the :envvar:`FLASK_ENV` and :envvar:`FLASK_DEBUG`
        environment variables will override :attr:`env` and
        :attr:`debug`.

        Threaded mode is enabled by default.

    .. versionchanged:: 0.10
        The default port is now picked from the ``SERVER_NAME``
        variable.
    """
    # Change this into a no-op if the server is invoked from the
    # command line. Have a look at cli.py for more information.
    if os.environ.get("FLASK_RUN_FROM_CLI") == "true":
        from .debughelpers import explain_ignored_app_run

        explain_ignored_app_run()
        return

    if get_load_dotenv(load_dotenv):
        cli.load_dotenv()

    # if set, let env vars override previous values
    if "FLASK_ENV" in os.environ:
        # FLASK_ENV implies re-deriving debug as well, since the
        # default debug value depends on the environment
        self.env = get_env()
        self.debug = get_debug_flag()
    elif "FLASK_DEBUG" in os.environ:
        self.debug = get_debug_flag()

    # debug passed to method overrides all other sources
    if debug is not None:
        self.debug = bool(debug)

    _host = "127.0.0.1"
    _port = 5000
    server_name = self.config.get("SERVER_NAME")
    sn_host, sn_port = None, None

    if server_name:
        # str.partition always returns three strings, so a SERVER_NAME
        # without ":port" leaves sn_port as "" rather than None
        sn_host, _, sn_port = server_name.partition(":")

    host = host or sn_host or _host
    # pick the first value that's not None (0 is allowed). Normalize an
    # empty sn_port ("" from a port-less SERVER_NAME) to None first:
    # previously it reached int("") and raised ValueError.
    port = int(next((p for p in (port, sn_port or None) if p is not None), _port))

    options.setdefault("use_reloader", self.debug)
    options.setdefault("use_debugger", self.debug)
    options.setdefault("threaded", True)

    cli.show_server_banner(self.env, self.debug, self.name, False)

    from werkzeug.serving import run_simple

    try:
        run_simple(host, port, self, **options)
    finally:
        # reset the first request information if the development server
        # reset normally. This makes it possible to restart the server
        # without reloader and that stuff from an interactive shell.
        self._got_first_request = False
+
def test_client(self, use_cookies=True, **kwargs):
    """Creates a test client for this application. For information
    about unit testing head over to :ref:`testing`.

    Note that if you are testing for assertions or exceptions in your
    application code, you must set ``app.testing = True`` in order for the
    exceptions to propagate to the test client. Otherwise, the exception
    will be handled by the application (not visible to the test client) and
    the only indication of an AssertionError or other exception will be a
    500 status code response to the test client. See the :attr:`testing`
    attribute. For example::

        app.testing = True
        client = app.test_client()

    The test client can be used in a ``with`` block to defer the closing down
    of the context until the end of the ``with`` block. This is useful if
    you want to access the context locals for testing::

        with app.test_client() as c:
            rv = c.get('/?vodka=42')
            assert request.args['vodka'] == '42'

    Additionally, you may pass optional keyword arguments that will then
    be passed to the application's :attr:`test_client_class` constructor.
    For example::

        from flask.testing import FlaskClient

        class CustomClient(FlaskClient):
            def __init__(self, *args, **kwargs):
                self._authentication = kwargs.pop("authentication")
                super(CustomClient, self).__init__(*args, **kwargs)

        app.test_client_class = CustomClient
        client = app.test_client(authentication='Basic ....')

    See :class:`~flask.testing.FlaskClient` for more information.

    .. versionchanged:: 0.4
       added support for ``with`` block usage for the client.

    .. versionadded:: 0.7
       The `use_cookies` parameter was added as well as the ability
       to override the client to be used by setting the
       :attr:`test_client_class` attribute.

    .. versionchanged:: 0.11
       Added `**kwargs` to support passing additional keyword arguments to
       the constructor of :attr:`test_client_class`.
    """
    cls = self.test_client_class
    # fall back to the stock client only when no custom class is set
    if cls is None:
        from .testing import FlaskClient as cls
    return cls(self, self.response_class, use_cookies=use_cookies, **kwargs)
+
+ def test_cli_runner(self, **kwargs):
+ """Create a CLI runner for testing CLI commands.
+ See :ref:`testing-cli`.
+
+ Returns an instance of :attr:`test_cli_runner_class`, by default
+ :class:`~flask.testing.FlaskCliRunner`. The Flask app object is
+ passed as the first argument.
+
+ .. versionadded:: 1.0
+ """
+ cls = self.test_cli_runner_class
+
+ if cls is None:
+ from .testing import FlaskCliRunner as cls
+
+ return cls(self, **kwargs)
+
def open_session(self, request):
    """Creates or opens a new session. Default implementation stores all
    session data in a signed cookie. This requires that the
    :attr:`secret_key` is set. Instead of overriding this method
    we recommend replacing the :class:`session_interface`.

    .. deprecated:: 1.0
        Will be removed in 2.0. Use
        ``session_interface.open_session`` instead.

    :param request: an instance of :attr:`request_class`.
    """

    warnings.warn(
        DeprecationWarning(
            '"open_session" is deprecated and will be removed in'
            ' 2.0. Use "session_interface.open_session" instead.'
        )
    )
    # thin delegation shim kept for backwards compatibility
    return self.session_interface.open_session(self, request)
+
def save_session(self, session, response):
    """Saves the session if it needs updates. For the default
    implementation, check :meth:`open_session`. Instead of overriding this
    method we recommend replacing the :class:`session_interface`.

    .. deprecated:: 1.0
        Will be removed in 2.0. Use
        ``session_interface.save_session`` instead.

    :param session: the session to be saved (a
                    :class:`~werkzeug.contrib.securecookie.SecureCookie`
                    object)
    :param response: an instance of :attr:`response_class`
    """

    warnings.warn(
        DeprecationWarning(
            '"save_session" is deprecated and will be removed in'
            ' 2.0. Use "session_interface.save_session" instead.'
        )
    )
    # thin delegation shim kept for backwards compatibility
    return self.session_interface.save_session(self, session, response)
+
def make_null_session(self):
    """Creates a new instance of a missing session. Instead of overriding
    this method we recommend replacing the :class:`session_interface`.

    .. deprecated:: 1.0
        Will be removed in 2.0. Use
        ``session_interface.make_null_session`` instead.

    .. versionadded:: 0.7
    """

    warnings.warn(
        DeprecationWarning(
            '"make_null_session" is deprecated and will be removed'
            ' in 2.0. Use "session_interface.make_null_session"'
            " instead."
        )
    )
    # thin delegation shim kept for backwards compatibility
    return self.session_interface.make_null_session(self)
+
@setupmethod
def register_blueprint(self, blueprint, **options):
    """Register a :class:`~flask.Blueprint` on the application. Keyword
    arguments passed to this method will override the defaults set on the
    blueprint.

    Calls the blueprint's :meth:`~flask.Blueprint.register` method after
    recording the blueprint in the application's :attr:`blueprints`.

    :param blueprint: The blueprint to register.
    :param url_prefix: Blueprint routes will be prefixed with this.
    :param subdomain: Blueprint routes will match on this subdomain.
    :param url_defaults: Blueprint routes will use these default values for
        view arguments.
    :param options: Additional keyword arguments are passed to
        :class:`~flask.blueprints.BlueprintSetupState`. They can be
        accessed in :meth:`~flask.Blueprint.record` callbacks.

    .. versionadded:: 0.7
    """
    first_registration = False

    if blueprint.name in self.blueprints:
        # Re-registering the *same* blueprint object is allowed
        # (first_registration stays False); a different object under
        # the same name is a programming error.
        # NOTE(review): this check uses ``assert`` and so is stripped
        # under ``python -O``; consider raising ValueError instead.
        assert self.blueprints[blueprint.name] is blueprint, (
            "A name collision occurred between blueprints %r and %r. Both"
            ' share the same name "%s". Blueprints that are created on the'
            " fly need unique names."
            % (blueprint, self.blueprints[blueprint.name], blueprint.name)
        )
    else:
        self.blueprints[blueprint.name] = blueprint
        self._blueprint_order.append(blueprint)
        first_registration = True

    blueprint.register(self, options, first_registration)
+
def iter_blueprints(self):
    """Yield all registered blueprints in registration order.

    .. versionadded:: 0.11
    """
    yield from self._blueprint_order
+
@setupmethod
def add_url_rule(
    self,
    rule,
    endpoint=None,
    view_func=None,
    provide_automatic_options=None,
    **options
):
    """Connects a URL rule. Works exactly like the :meth:`route`
    decorator. If a view_func is provided it will be registered with the
    endpoint.

    Basically this example::

        @app.route('/')
        def index():
            pass

    Is equivalent to the following::

        def index():
            pass
        app.add_url_rule('/', 'index', index)

    If the view_func is not provided you will need to connect the endpoint
    to a view function like so::

        app.view_functions['index'] = index

    Internally :meth:`route` invokes :meth:`add_url_rule` so if you want
    to customize the behavior via subclassing you only need to change
    this method.

    For more information refer to :ref:`url-route-registrations`.

    .. versionchanged:: 0.2
       `view_func` parameter added.

    .. versionchanged:: 0.6
       ``OPTIONS`` is added automatically as method.

    :param rule: the URL rule as string
    :param endpoint: the endpoint for the registered URL rule. Flask
                     itself assumes the name of the view function as
                     endpoint
    :param view_func: the function to call when serving a request to the
                      provided endpoint
    :param provide_automatic_options: controls whether the ``OPTIONS``
        method should be added automatically. This can also be controlled
        by setting the ``view_func.provide_automatic_options = False``
        before adding the rule.
    :param options: the options to be forwarded to the underlying
                    :class:`~werkzeug.routing.Rule` object. A change
                    to Werkzeug is handling of method options. methods
                    is a list of methods this rule should be limited
                    to (``GET``, ``POST`` etc.). By default a rule
                    just listens for ``GET`` (and implicitly ``HEAD``).
                    Starting with Flask 0.6, ``OPTIONS`` is implicitly
                    added and handled by the standard request handling.
    """
    if endpoint is None:
        # default the endpoint to the view function's name
        endpoint = _endpoint_from_view_func(view_func)
    options["endpoint"] = endpoint
    methods = options.pop("methods", None)

    # if the methods are not given and the view_func object knows its
    # methods we can use that instead. If neither exists, we go with
    # a tuple of only ``GET`` as default.
    if methods is None:
        methods = getattr(view_func, "methods", None) or ("GET",)
    if isinstance(methods, string_types):
        # a bare string like "POST" would iterate per character
        raise TypeError(
            "Allowed methods have to be iterables of strings, "
            'for example: @app.route(..., methods=["POST"])'
        )
    methods = set(item.upper() for item in methods)

    # Methods that should always be added
    required_methods = set(getattr(view_func, "required_methods", ()))

    # starting with Flask 0.8 the view_func object can disable and
    # force-enable the automatic options handling.
    if provide_automatic_options is None:
        provide_automatic_options = getattr(
            view_func, "provide_automatic_options", None
        )

    if provide_automatic_options is None:
        # auto-OPTIONS only when the view does not handle OPTIONS itself
        if "OPTIONS" not in methods:
            provide_automatic_options = True
            required_methods.add("OPTIONS")
        else:
            provide_automatic_options = False

    # Add the required methods now.
    methods |= required_methods

    rule = self.url_rule_class(rule, methods=methods, **options)
    rule.provide_automatic_options = provide_automatic_options

    self.url_map.add(rule)
    if view_func is not None:
        # re-registering the same function for an endpoint is fine;
        # silently overwriting a different one is not
        old_func = self.view_functions.get(endpoint)
        if old_func is not None and old_func != view_func:
            raise AssertionError(
                "View function mapping is overwriting an "
                "existing endpoint function: %s" % endpoint
            )
        self.view_functions[endpoint] = view_func
+
def route(self, rule, **options):
    """Decorator form of :meth:`add_url_rule`: register the decorated
    function as the view for *rule*. Example::

        @app.route('/')
        def index():
            return 'Hello World'

    For more information refer to :ref:`url-route-registrations`.

    :param rule: the URL rule as string
    :param endpoint: the endpoint for the registered URL rule. Flask
                     itself assumes the name of the view function as
                     endpoint
    :param options: forwarded to the underlying
                    :class:`~werkzeug.routing.Rule` object; ``methods``
                    limits the HTTP methods this rule answers (``GET``
                    plus implicit ``HEAD`` by default; ``OPTIONS`` is
                    added automatically since Flask 0.6).
    """

    def register(view_func):
        endpoint = options.pop("endpoint", None)
        self.add_url_rule(rule, endpoint, view_func, **options)
        return view_func

    return register
+
@setupmethod
def endpoint(self, endpoint):
    """A decorator that binds a function to an endpoint name.
    Example::

        @app.endpoint('example.endpoint')
        def example():
            return "example"

    :param endpoint: the name of the endpoint
    """

    def register(f):
        self.view_functions[endpoint] = f
        return f

    return register
+
@staticmethod
def _get_exc_class_and_code(exc_class_or_code):
    """Get the exception class being handled. For HTTP status codes
    or ``HTTPException`` subclasses, return both the exception and
    status code.

    :param exc_class_or_code: Any exception class, or an HTTP status
        code as an integer.
    """
    if isinstance(exc_class_or_code, integer_types):
        # an unknown status code raises KeyError here; callers such as
        # _register_error_handler translate that for the user
        exc_class = default_exceptions[exc_class_or_code]
    else:
        exc_class = exc_class_or_code

    # only exception *classes* are meaningful handler keys
    assert issubclass(exc_class, Exception)

    if issubclass(exc_class, HTTPException):
        return exc_class, exc_class.code
    else:
        # non-HTTP exceptions have no associated status code
        return exc_class, None
+
@setupmethod
def errorhandler(self, code_or_exception):
    """Register a function to handle errors by code or exception class.

    A decorator that is used to register a function given an
    error code. Example::

        @app.errorhandler(404)
        def page_not_found(error):
            return 'This page does not exist', 404

    You can also register handlers for arbitrary exceptions::

        @app.errorhandler(DatabaseError)
        def special_exception_handler(error):
            return 'Database connection failed', 500

    .. versionadded:: 0.7
        Use :meth:`register_error_handler` instead of modifying
        :attr:`error_handler_spec` directly, for application wide error
        handlers.

    .. versionadded:: 0.7
       One can now additionally also register custom exception types
       that do not necessarily have to be a subclass of the
       :class:`~werkzeug.exceptions.HTTPException` class.

    :param code_or_exception: the code as integer for the handler, or
                              an arbitrary exception
    """

    def decorator(f):
        # key=None registers application-wide (blueprints use their name)
        self._register_error_handler(None, code_or_exception, f)
        return f

    return decorator
+
@setupmethod
def register_error_handler(self, code_or_exception, f):
    """Alternative error attach function to the :meth:`errorhandler`
    decorator that is more straightforward to use for non decorator
    usage.

    .. versionadded:: 0.7
    """
    # same app-wide (key=None) registration the decorator performs
    self._register_error_handler(None, code_or_exception, f)
+
@setupmethod
def _register_error_handler(self, key, code_or_exception, f):
    """Internal registration helper shared by :meth:`errorhandler`
    and :meth:`register_error_handler`.

    :type key: None|str
    :type code_or_exception: int|T<=Exception
    :type f: callable
    """
    if isinstance(code_or_exception, HTTPException):  # old broken behavior
        raise ValueError(
            "Tried to register a handler for an exception instance {0!r}."
            " Handlers can only be registered for exception classes or"
            " HTTP error codes.".format(code_or_exception)
        )

    try:
        exc_class, code = self._get_exc_class_and_code(code_or_exception)
    except KeyError:
        # the integer was not a known HTTP status code
        raise KeyError(
            "'{0}' is not a recognized HTTP error code. Use a subclass of"
            " HTTPException with that code instead.".format(code_or_exception)
        )

    # spec layout: key (None or blueprint name) -> status code (or None)
    # -> {exception class: handler}
    handlers = self.error_handler_spec.setdefault(key, {}).setdefault(code, {})
    handlers[exc_class] = f
+
@setupmethod
def template_filter(self, name=None):
    """Decorator registering a custom Jinja template filter.

    The filter is exposed under *name* when given, otherwise under the
    decorated function's own name. Example::

        @app.template_filter()
        def reverse(s):
            return s[::-1]

    :param name: the optional name of the filter, otherwise the
                 function name will be used.
    """

    def register(f):
        self.add_template_filter(f, name=name)
        return f

    return register
+
@setupmethod
def add_template_filter(self, f, name=None):
    """Non-decorator counterpart of :meth:`template_filter`: register
    *f* as a Jinja filter.

    :param name: the optional name of the filter, otherwise the
                 function name will be used.
    """
    filter_name = name or f.__name__
    self.jinja_env.filters[filter_name] = f
+
@setupmethod
def template_test(self, name=None):
    """Decorator registering a custom Jinja template test.

    The test is exposed under *name* when given, otherwise under the
    decorated function's own name. Example::

        @app.template_test()
        def is_prime(n):
            if n == 2:
                return True
            for i in range(2, int(math.ceil(math.sqrt(n))) + 1):
                if n % i == 0:
                    return False
            return True

    .. versionadded:: 0.10

    :param name: the optional name of the test, otherwise the
                 function name will be used.
    """

    def register(f):
        self.add_template_test(f, name=name)
        return f

    return register
+
@setupmethod
def add_template_test(self, f, name=None):
    """Non-decorator counterpart of :meth:`template_test`: register
    *f* as a Jinja test.

    .. versionadded:: 0.10

    :param name: the optional name of the test, otherwise the
                 function name will be used.
    """
    test_name = name or f.__name__
    self.jinja_env.tests[test_name] = f
+
@setupmethod
def template_global(self, name=None):
    """Decorator registering a custom template global function.

    The global is exposed under *name* when given, otherwise under the
    decorated function's own name. Example::

        @app.template_global()
        def double(n):
            return 2 * n

    .. versionadded:: 0.10

    :param name: the optional name of the global function, otherwise the
                 function name will be used.
    """

    def register(f):
        self.add_template_global(f, name=name)
        return f

    return register
+
@setupmethod
def add_template_global(self, f, name=None):
    """Non-decorator counterpart of :meth:`template_global`: register
    *f* as a Jinja global.

    .. versionadded:: 0.10

    :param name: the optional name of the global function, otherwise the
                 function name will be used.
    """
    global_name = name or f.__name__
    self.jinja_env.globals[global_name] = f
+
@setupmethod
def before_request(self, f):
    """Register *f* to run before each request.

    Typical uses are opening a database connection or loading the
    current user from the session. *f* is called with no arguments; if
    it returns a non-``None`` value, that value is treated as the
    view's return value and further request handling stops.
    """
    app_funcs = self.before_request_funcs.setdefault(None, [])
    app_funcs.append(f)
    return f
+
@setupmethod
def before_first_request(self, f):
    """Register *f* to run once, before the first request handled by
    this application instance.

    *f* is called with no arguments and its return value is ignored.

    .. versionadded:: 0.8
    """
    self.before_first_request_funcs.append(f)
    return f
+
@setupmethod
def after_request(self, f):
    """Register *f* to run after each request.

    *f* receives the :attr:`response_class` instance and must return a
    response object (the same one or a new one; see
    :meth:`process_response`).

    As of Flask 0.7 these functions may be skipped at the end of the
    request when an unhandled exception occurred.
    """
    app_funcs = self.after_request_funcs.setdefault(None, [])
    app_funcs.append(f)
    return f
+
@setupmethod
def teardown_request(self, f):
    """Register *f* to run when the request context is popped --
    whether or not an exception occurred, and even when no actual
    request was dispatched::

        ctx = app.test_request_context()
        ctx.push()
        ...
        ctx.pop()   # teardown functions run here

    If teardown happens because of an exception, *f* receives the
    error object. Return values are ignored. Teardown callbacks must
    take every step to avoid failing; wrap risky work in try/except
    and log occurring errors.

    .. admonition:: Debug Note

       In debug mode Flask will not tear down a request on an exception
       immediately. Instead it will keep it alive so that the interactive
       debugger can still access it. This behavior can be controlled
       by the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration variable.
    """
    app_funcs = self.teardown_request_funcs.setdefault(None, [])
    app_funcs.append(f)
    return f
+
@setupmethod
def teardown_appcontext(self, f):
    """Register *f* to run when the application context is popped
    (which normally also happens when a request context is popped,
    since a request context manages an application context)::

        ctx = app.app_context()
        ctx.push()
        ...
        ctx.pop()   # teardown functions run here

    If teardown happens because of an unhandled exception, *f*
    receives the error object -- unless an :meth:`errorhandler`
    handled it, in which case the teardown does not receive it.
    Return values are ignored.

    .. versionadded:: 0.9
    """
    self.teardown_appcontext_funcs.append(f)
    return f
+
@setupmethod
def context_processor(self, f):
    """Register *f* as an application-wide template context processor."""
    app_processors = self.template_context_processors[None]
    app_processors.append(f)
    return f
+
@setupmethod
def shell_context_processor(self, f):
    """Register *f* as a shell context processor.

    .. versionadded:: 0.11
    """
    self.shell_context_processors.append(f)
    return f
+
@setupmethod
def url_value_preprocessor(self, f):
    """Register an application-wide URL value preprocessor.

    These run for every view, *before* the :meth:`before_request`
    functions, and may mutate the values captured from the matched URL
    before they are passed to the view -- for example popping a common
    language code into ``g`` instead of passing it to every view.

    *f* is called with the endpoint name and the values dict; its
    return value is ignored.
    """
    preprocessors = self.url_value_preprocessors.setdefault(None, [])
    preprocessors.append(f)
    return f
+
@setupmethod
def url_defaults(self, f):
    """Register an application-wide URL defaults callback.

    *f* is called with the endpoint and the values dict and should
    update the values in place.
    """
    callbacks = self.url_default_functions.setdefault(None, [])
    callbacks.append(f)
    return f
+
def _find_error_handler(self, e):
    """Return a registered error handler for an exception in this order:
    blueprint handler for a specific code, app handler for a specific code,
    blueprint handler for an exception class, app handler for an exception
    class, or ``None`` if a suitable handler is not found.
    """
    exc_class, code = self._get_exc_class_and_code(type(e))

    # most-specific first: (blueprint, code), (app, code), then the
    # code-less class lookups
    for name, c in (
        (request.blueprint, code),
        (None, code),
        (request.blueprint, None),
        (None, None),
    ):
        # NOTE: setdefault mutates error_handler_spec as a side effect,
        # inserting an empty dict for keys not seen before
        handler_map = self.error_handler_spec.setdefault(name, {}).get(c)

        if not handler_map:
            continue

        # walk the MRO so a handler registered for a base class also
        # catches its subclasses
        for cls in exc_class.__mro__:
            handler = handler_map.get(cls)

            if handler is not None:
                return handler
+
def handle_http_exception(self, e):
    """Handles an HTTP exception. By default this will invoke the
    registered error handlers and fall back to returning the
    exception as response.

    .. versionchanged:: 1.0.3
        ``RoutingException``, used internally for actions such as
         slash redirects during routing, is not passed to error
         handlers.

    .. versionchanged:: 1.0
        Exceptions are looked up by code *and* by MRO, so
        ``HTTPException`` subclasses can be handled with a catch-all
        handler for the base ``HTTPException``.

    .. versionadded:: 0.3
    """
    # Proxy exceptions don't have error codes. We want to always return
    # those unchanged as errors
    if e.code is None:
        return e

    # RoutingExceptions are used internally to trigger routing
    # actions, such as slash redirects raising RequestRedirect. They
    # are not raised or handled in user code.
    if isinstance(e, RoutingException):
        return e

    handler = self._find_error_handler(e)
    if handler is None:
        # no handler registered: the HTTPException itself is a valid
        # WSGI response
        return e
    return handler(e)
+
def trap_http_exception(self, e):
    """Decide whether an HTTP exception raised by a view should be
    re-raised ("trapped") instead of handled.

    Returns ``True`` when ``TRAP_HTTP_EXCEPTIONS`` is set; otherwise,
    bad-request errors are trapped according to
    ``TRAP_BAD_REQUEST_ERRORS`` (when that is unset, key errors are
    trapped in debug mode only). Trapped exceptions skip the error
    handlers and show up as regular tracebacks, which helps debugging
    implicitly raised HTTP exceptions.

    .. versionchanged:: 1.0
        Bad request errors are not trapped by default in debug mode.

    .. versionadded:: 0.8
    """
    if self.config["TRAP_HTTP_EXCEPTIONS"]:
        return True

    trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"]

    if trap_bad_request is None:
        # unset: fall back to trapping key errors in debug mode only
        return bool(self.debug and isinstance(e, BadRequestKeyError))

    if trap_bad_request:
        return isinstance(e, BadRequest)

    return False
+
def handle_user_exception(self, e):
    """This method is called whenever an exception occurs that
    should be handled. A special case is :class:`~werkzeug
    .exceptions.HTTPException` which is forwarded to the
    :meth:`handle_http_exception` method. This function will either
    return a response value or reraise the exception with the same
    traceback.

    .. versionchanged:: 1.0
        Key errors raised from request data like ``form`` show the
        bad key in debug mode rather than a generic bad request
        message.

    .. versionadded:: 0.7
    """
    exc_type, exc_value, tb = sys.exc_info()
    assert exc_value is e
    # ensure not to trash sys.exc_info() at that point in case someone
    # wants the traceback preserved in handle_http_exception. Of course
    # we cannot prevent users from trashing it themselves in a custom
    # trap_http_exception method so that's their fault then.

    if isinstance(e, BadRequestKeyError):
        if self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"]:
            e.show_exception = True

            # Werkzeug < 0.15 doesn't add the KeyError to the 400
            # message, add it in manually.
            # TODO: clean up once Werkzeug >= 0.15.5 is required
            # NOTE(review): assumes the offending key is e.args[0] --
            # Werkzeug sets this; confirm for manually raised instances.
            if e.args[0] not in e.get_description():
                e.description = "KeyError: '{}'".format(*e.args)
        elif not hasattr(BadRequestKeyError, "show_exception"):
            # old Werkzeug: hide the key by clearing the args
            e.args = ()

    if isinstance(e, HTTPException) and not self.trap_http_exception(e):
        return self.handle_http_exception(e)

    handler = self._find_error_handler(e)

    if handler is None:
        # no handler: re-raise with the original traceback preserved
        reraise(exc_type, exc_value, tb)
    return handler(e)
+
def handle_exception(self, e):
    """Handle an exception that did not have an error handler
    associated with it, or that was raised from an error handler.
    This always causes a 500 ``InternalServerError``.

    Always sends the :data:`got_request_exception` signal.

    If :attr:`propagate_exceptions` is ``True``, such as in debug
    mode, the error will be re-raised so that the debugger can
    display it. Otherwise, the original exception is logged, and
    an :exc:`~werkzeug.exceptions.InternalServerError` is returned.

    If an error handler is registered for ``InternalServerError`` or
    ``500``, it will be used. For consistency, the handler will
    always receive the ``InternalServerError``. The original
    unhandled exception is available as ``e.original_exception``.

    .. note::
        Prior to Werkzeug 1.0.0, ``InternalServerError`` will not
        always have an ``original_exception`` attribute. Use
        ``getattr(e, "original_exception", None)`` to simulate the
        behavior for compatibility.

    .. versionchanged:: 1.1.0
        Always passes the ``InternalServerError`` instance to the
        handler, setting ``original_exception`` to the unhandled
        error.

    .. versionchanged:: 1.1.0
        ``after_request`` functions and other finalization is done
        even for the default 500 response when there is no handler.

    .. versionadded:: 0.3
    """
    exc_type, exc_value, tb = sys.exc_info()
    got_request_exception.send(self, exception=e)

    if self.propagate_exceptions:
        # if we want to repropagate the exception, we can attempt to
        # raise it with the whole traceback in case we can do that
        # (the function was actually called from the except part)
        # otherwise, we just raise the error again
        if exc_value is e:
            reraise(exc_type, exc_value, tb)
        else:
            raise e

    self.log_exception((exc_type, exc_value, tb))
    server_error = InternalServerError()
    # TODO: pass as param when Werkzeug>=1.0.0 is required
    # TODO: also remove note about this from docstring and docs
    server_error.original_exception = e
    handler = self._find_error_handler(server_error)

    if handler is not None:
        # the handler's return value replaces the bare 500 error
        server_error = handler(server_error)

    return self.finalize_request(server_error, from_error_handler=True)
+
+    def log_exception(self, exc_info):
+        """Logs an exception. This is called by :meth:`handle_exception`
+        if debugging is disabled and right before the handler is called.
+        The default implementation logs the exception as error on the
+        :attr:`logger`.
+
+        :param exc_info: a ``(type, value, traceback)`` tuple as returned
+            by :func:`sys.exc_info`.
+
+        .. versionadded:: 0.8
+        """
+        # Requires an active request context: the log line records the
+        # path and method of the request that failed.
+        self.logger.error(
+            "Exception on %s [%s]" % (request.path, request.method), exc_info=exc_info
+        )
+
+    def raise_routing_exception(self, request):
+        """Exceptions that are recording during routing are reraised with
+        this method. During debug we are not reraising redirect requests
+        for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising
+        a different error instead to help debug situations.
+
+        :internal:
+        """
+        # Outside debug mode, or for safe methods, surface whatever the
+        # router recorded (404, redirect, ...) unchanged.
+        if (
+            not self.debug
+            or not isinstance(request.routing_exception, RequestRedirect)
+            or request.method in ("GET", "HEAD", "OPTIONS")
+        ):
+            raise request.routing_exception
+
+        # Debug mode and a redirect of a non-GET/HEAD/OPTIONS request:
+        # raise a descriptive error instead of silently redirecting.
+        from .debughelpers import FormDataRoutingRedirect
+
+        raise FormDataRoutingRedirect(request)
+
+    def dispatch_request(self):
+        """Does the request dispatching. Matches the URL and returns the
+        return value of the view or error handler. This does not have to
+        be a response object. In order to convert the return value to a
+        proper response object, call :func:`make_response`.
+
+        .. versionchanged:: 0.7
+           This no longer does the exception handling, this code was
+           moved to the new :meth:`full_dispatch_request`.
+        """
+        req = _request_ctx_stack.top.request
+        if req.routing_exception is not None:
+            # URL matching failed earlier; surface the recorded error now.
+            self.raise_routing_exception(req)
+        rule = req.url_rule
+        # if we provide automatic options for this URL and the
+        # request came with the OPTIONS method, reply automatically
+        if (
+            getattr(rule, "provide_automatic_options", False)
+            and req.method == "OPTIONS"
+        ):
+            return self.make_default_options_response()
+        # otherwise dispatch to the handler for that endpoint
+        return self.view_functions[rule.endpoint](**req.view_args)
+
+    def full_dispatch_request(self):
+        """Dispatches the request and on top of that performs request
+        pre and postprocessing as well as HTTP exception catching and
+        error handling.
+
+        .. versionadded:: 0.7
+        """
+        self.try_trigger_before_first_request_functions()
+        try:
+            request_started.send(self)
+            rv = self.preprocess_request()
+            if rv is None:
+                # No before_request handler short-circuited; run the view.
+                rv = self.dispatch_request()
+        except Exception as e:
+            rv = self.handle_user_exception(e)
+        return self.finalize_request(rv)
+
+    def finalize_request(self, rv, from_error_handler=False):
+        """Given the return value from a view function this finalizes
+        the request by converting it into a response and invoking the
+        postprocessing functions. This is invoked for both normal
+        request dispatching as well as error handlers.
+
+        Because this means that it might be called as a result of a
+        failure a special safe mode is available which can be enabled
+        with the `from_error_handler` flag. If enabled, failures in
+        response processing will be logged and otherwise ignored.
+
+        :internal:
+        """
+        response = self.make_response(rv)
+        try:
+            response = self.process_response(response)
+            request_finished.send(self, response=response)
+        except Exception:
+            if not from_error_handler:
+                raise
+            # Already handling an error: log instead of raising so the
+            # error response still reaches the client.
+            self.logger.exception(
+                "Request finalizing failed with an error while handling an error"
+            )
+        return response
+
+    def try_trigger_before_first_request_functions(self):
+        """Called before each request and will ensure that it triggers
+        the :attr:`before_first_request_funcs` and only exactly once per
+        application instance (which means process usually).
+
+        :internal:
+        """
+        # Double-checked locking: a cheap unguarded test first, then a
+        # re-check under the lock so the functions run exactly once.
+        if self._got_first_request:
+            return
+        with self._before_request_lock:
+            if self._got_first_request:
+                return
+            for func in self.before_first_request_funcs:
+                func()
+            self._got_first_request = True
+
+    def make_default_options_response(self):
+        """This method is called to create the default ``OPTIONS`` response.
+        This can be changed through subclassing to change the default
+        behavior of ``OPTIONS`` responses.
+
+        .. versionadded:: 0.7
+        """
+        adapter = _request_ctx_stack.top.url_adapter
+        if hasattr(adapter, "allowed_methods"):
+            methods = adapter.allowed_methods()
+        else:
+            # fallback for Werkzeug < 0.7
+            methods = []
+            try:
+                # "--" is never a valid HTTP method, so matching raises
+                # MethodNotAllowed carrying the methods valid for this URL.
+                adapter.match(method="--")
+            except MethodNotAllowed as e:
+                methods = e.valid_methods
+            except HTTPException:
+                pass
+        rv = self.response_class()
+        rv.allow.update(methods)
+        return rv
+
+    def should_ignore_error(self, error):
+        """This is called to figure out if an error should be ignored
+        or not as far as the teardown system is concerned. If this
+        function returns ``True`` then the teardown handlers will not be
+        passed the error.
+
+        :param error: the exception that occurred, or ``None``.
+
+        .. versionadded:: 0.10
+        """
+        # Default: never ignore; teardown functions always see the error.
+        return False
+
+    def make_response(self, rv):
+        """Convert the return value from a view function to an instance of
+        :attr:`response_class`.
+
+        :param rv: the return value from the view function. The view function
+            must return a response. Returning ``None``, or the view ending
+            without returning, is not allowed. The following types are allowed
+            for ``view_rv``:
+
+            ``str`` (``unicode`` in Python 2)
+                A response object is created with the string encoded to UTF-8
+                as the body.
+
+            ``bytes`` (``str`` in Python 2)
+                A response object is created with the bytes as the body.
+
+            ``dict``
+                A dictionary that will be jsonify'd before being returned.
+
+            ``tuple``
+                Either ``(body, status, headers)``, ``(body, status)``, or
+                ``(body, headers)``, where ``body`` is any of the other types
+                allowed here, ``status`` is a string or an integer, and
+                ``headers`` is a dictionary or a list of ``(key, value)``
+                tuples. If ``body`` is a :attr:`response_class` instance,
+                ``status`` overwrites the exiting value and ``headers`` are
+                extended.
+
+            :attr:`response_class`
+                The object is returned unchanged.
+
+            other :class:`~werkzeug.wrappers.Response` class
+                The object is coerced to :attr:`response_class`.
+
+            :func:`callable`
+                The function is called as a WSGI application. The result is
+                used to create a response object.
+
+        .. versionchanged:: 0.9
+           Previously a tuple was interpreted as the arguments for the
+           response object.
+        """
+
+        status = headers = None
+
+        # unpack tuple returns
+        if isinstance(rv, tuple):
+            len_rv = len(rv)
+
+            # a 3-tuple is unpacked directly
+            if len_rv == 3:
+                rv, status, headers = rv
+            # decide if a 2-tuple has status or headers
+            elif len_rv == 2:
+                if isinstance(rv[1], (Headers, dict, tuple, list)):
+                    rv, headers = rv
+                else:
+                    rv, status = rv
+            # other sized tuples are not allowed
+            else:
+                raise TypeError(
+                    "The view function did not return a valid response tuple."
+                    " The tuple must have the form (body, status, headers),"
+                    " (body, status), or (body, headers)."
+                )
+
+        # the body must not be None
+        if rv is None:
+            raise TypeError(
+                "The view function did not return a valid response. The"
+                " function either returned None or ended without a return"
+                " statement."
+            )
+
+        # make sure the body is an instance of the response class
+        if not isinstance(rv, self.response_class):
+            if isinstance(rv, (text_type, bytes, bytearray)):
+                # let the response class set the status and headers instead of
+                # waiting to do it manually, so that the class can handle any
+                # special logic
+                rv = self.response_class(rv, status=status, headers=headers)
+                status = headers = None
+            elif isinstance(rv, dict):
+                # a plain dict return becomes a JSON response
+                rv = jsonify(rv)
+            elif isinstance(rv, BaseResponse) or callable(rv):
+                # evaluate a WSGI callable, or coerce a different response
+                # class to the correct type
+                try:
+                    rv = self.response_class.force_type(rv, request.environ)
+                except TypeError as e:
+                    new_error = TypeError(
+                        "{e}\nThe view function did not return a valid"
+                        " response. The return type must be a string, dict, tuple,"
+                        " Response instance, or WSGI callable, but it was a"
+                        " {rv.__class__.__name__}.".format(e=e, rv=rv)
+                    )
+                    reraise(TypeError, new_error, sys.exc_info()[2])
+            else:
+                raise TypeError(
+                    "The view function did not return a valid"
+                    " response. The return type must be a string, dict, tuple,"
+                    " Response instance, or WSGI callable, but it was a"
+                    " {rv.__class__.__name__}.".format(rv=rv)
+                )
+
+        # prefer the status if it was provided
+        if status is not None:
+            if isinstance(status, (text_type, bytes, bytearray)):
+                rv.status = status
+            else:
+                rv.status_code = status
+
+        # extend existing headers with provided headers
+        if headers:
+            rv.headers.extend(headers)
+
+        return rv
+
+    def create_url_adapter(self, request):
+        """Creates a URL adapter for the given request. The URL adapter
+        is created at a point where the request context is not yet set
+        up so the request is passed explicitly.
+
+        Returns ``None`` when neither a request nor ``SERVER_NAME`` is
+        available, since URL building is impossible in that case.
+
+        .. versionadded:: 0.6
+
+        .. versionchanged:: 0.9
+           This can now also be called without a request object when the
+           URL adapter is created for the application context.
+
+        .. versionchanged:: 1.0
+            :data:`SERVER_NAME` no longer implicitly enables subdomain
+            matching. Use :attr:`subdomain_matching` instead.
+        """
+        if request is not None:
+            # If subdomain matching is disabled (the default), use the
+            # default subdomain in all cases. This should be the default
+            # in Werkzeug but it currently does not have that feature.
+            subdomain = (
+                (self.url_map.default_subdomain or None)
+                if not self.subdomain_matching
+                else None
+            )
+            return self.url_map.bind_to_environ(
+                request.environ,
+                server_name=self.config["SERVER_NAME"],
+                subdomain=subdomain,
+            )
+        # We need at the very least the server name to be set for this
+        # to work.
+        if self.config["SERVER_NAME"] is not None:
+            return self.url_map.bind(
+                self.config["SERVER_NAME"],
+                script_name=self.config["APPLICATION_ROOT"],
+                url_scheme=self.config["PREFERRED_URL_SCHEME"],
+            )
+        # Otherwise fall through and implicitly return None.
+
+    def inject_url_defaults(self, endpoint, values):
+        """Injects the URL defaults for the given endpoint directly into
+        the values dictionary passed. This is used internally and
+        automatically called on URL building.
+
+        :param endpoint: the endpoint being built (possibly dotted with a
+            blueprint prefix).
+        :param values: the values dict mutated in place.
+
+        .. versionadded:: 0.7
+        """
+        # Application-wide default functions (keyed under None) run first,
+        # then the owning blueprint's when the endpoint name is dotted.
+        funcs = self.url_default_functions.get(None, ())
+        if "." in endpoint:
+            bp = endpoint.rsplit(".", 1)[0]
+            funcs = chain(funcs, self.url_default_functions.get(bp, ()))
+        for func in funcs:
+            func(endpoint, values)
+
+    def handle_url_build_error(self, error, endpoint, values):
+        """Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`.
+
+        Each registered handler in :attr:`url_build_error_handlers` is
+        tried in order; the first non-``None`` return value is used. If
+        none succeeds, the original error is re-raised.
+        """
+        exc_type, exc_value, tb = sys.exc_info()
+        for handler in self.url_build_error_handlers:
+            try:
+                rv = handler(error, endpoint, values)
+                if rv is not None:
+                    return rv
+            except BuildError as e:
+                # make error available outside except block (py3)
+                error = e
+
+        # At this point we want to reraise the exception. If the error is
+        # still the same one we can reraise it with the original traceback,
+        # otherwise we raise it from here.
+        if error is exc_value:
+            reraise(exc_type, exc_value, tb)
+        raise error
+
+    def preprocess_request(self):
+        """Called before the request is dispatched. Calls
+        :attr:`url_value_preprocessors` registered with the app and the
+        current blueprint (if any). Then calls :attr:`before_request_funcs`
+        registered with the app and the blueprint.
+
+        If any :meth:`before_request` handler returns a non-None value, the
+        value is handled as if it was the return value from the view, and
+        further request handling is stopped.
+        """
+
+        bp = _request_ctx_stack.top.request.blueprint
+
+        # App-wide preprocessors (keyed under None) run before the
+        # blueprint's.
+        funcs = self.url_value_preprocessors.get(None, ())
+        if bp is not None and bp in self.url_value_preprocessors:
+            funcs = chain(funcs, self.url_value_preprocessors[bp])
+        for func in funcs:
+            func(request.endpoint, request.view_args)
+
+        funcs = self.before_request_funcs.get(None, ())
+        if bp is not None and bp in self.before_request_funcs:
+            funcs = chain(funcs, self.before_request_funcs[bp])
+        for func in funcs:
+            rv = func()
+            if rv is not None:
+                # Short-circuit: this value replaces the view's return.
+                return rv
+
+    def process_response(self, response):
+        """Can be overridden in order to modify the response object
+        before it's sent to the WSGI server. By default this will
+        call all the :meth:`after_request` decorated functions.
+
+        .. versionchanged:: 0.5
+           As of Flask 0.5 the functions registered for after request
+           execution are called in reverse order of registration.
+
+        :param response: a :attr:`response_class` object.
+        :return: a new response object or the same, has to be an
+                 instance of :attr:`response_class`.
+        """
+        ctx = _request_ctx_stack.top
+        bp = ctx.request.blueprint
+        # Handlers registered during this request run first, then the
+        # blueprint's, then the app-wide ones (each newest-first).
+        funcs = ctx._after_request_functions
+        if bp is not None and bp in self.after_request_funcs:
+            funcs = chain(funcs, reversed(self.after_request_funcs[bp]))
+        if None in self.after_request_funcs:
+            funcs = chain(funcs, reversed(self.after_request_funcs[None]))
+        for handler in funcs:
+            response = handler(response)
+        # Persist the session last so handlers may still modify it.
+        if not self.session_interface.is_null_session(ctx.session):
+            self.session_interface.save_session(self, ctx.session, response)
+        return response
+
+    def do_teardown_request(self, exc=_sentinel):
+        """Called after the request is dispatched and the response is
+        returned, right before the request context is popped.
+
+        This calls all functions decorated with
+        :meth:`teardown_request`, and :meth:`Blueprint.teardown_request`
+        if a blueprint handled the request. Finally, the
+        :data:`request_tearing_down` signal is sent.
+
+        This is called by
+        :meth:`RequestContext.pop() `,
+        which may be delayed during testing to maintain access to
+        resources.
+
+        :param exc: An unhandled exception raised while dispatching the
+            request. Detected from the current exception information if
+            not passed. Passed to each teardown function.
+
+        .. versionchanged:: 0.9
+            Added the ``exc`` argument.
+        """
+        # _sentinel distinguishes "not passed" from an explicit None.
+        if exc is _sentinel:
+            exc = sys.exc_info()[1]
+        # App-wide teardown functions run newest-first, then the blueprint's.
+        funcs = reversed(self.teardown_request_funcs.get(None, ()))
+        bp = _request_ctx_stack.top.request.blueprint
+        if bp is not None and bp in self.teardown_request_funcs:
+            funcs = chain(funcs, reversed(self.teardown_request_funcs[bp]))
+        for func in funcs:
+            func(exc)
+        request_tearing_down.send(self, exc=exc)
+
+    def do_teardown_appcontext(self, exc=_sentinel):
+        """Called right before the application context is popped.
+
+        When handling a request, the application context is popped
+        after the request context. See :meth:`do_teardown_request`.
+
+        This calls all functions decorated with
+        :meth:`teardown_appcontext`. Then the
+        :data:`appcontext_tearing_down` signal is sent.
+
+        This is called by
+        :meth:`AppContext.pop() `.
+
+        :param exc: the unhandled exception, detected from the current
+            exception information if not passed explicitly.
+
+        .. versionadded:: 0.9
+        """
+        # _sentinel distinguishes "not passed" from an explicit None.
+        if exc is _sentinel:
+            exc = sys.exc_info()[1]
+        # Run newest-first, mirroring do_teardown_request.
+        for func in reversed(self.teardown_appcontext_funcs):
+            func(exc)
+        appcontext_tearing_down.send(self, exc=exc)
+
+    def app_context(self):
+        """Create an :class:`~flask.ctx.AppContext`. Use as a ``with``
+        block to push the context, which will make :data:`current_app`
+        point at this application.
+
+        An application context is automatically pushed by
+        :meth:`RequestContext.push() `
+        when handling a request, and when running a CLI command. Use
+        this to manually create a context outside of these situations.
+
+        ::
+
+            with app.app_context():
+                init_db()
+
+        See :doc:`/appcontext`.
+
+        .. versionadded:: 0.9
+        """
+        # Each call creates a fresh, not-yet-pushed context.
+        return AppContext(self)
+
+    def request_context(self, environ):
+        """Create a :class:`~flask.ctx.RequestContext` representing a
+        WSGI environment. Use a ``with`` block to push the context,
+        which will make :data:`request` point at this request.
+
+        See :doc:`/reqcontext`.
+
+        Typically you should not call this from your own code. A request
+        context is automatically pushed by the :meth:`wsgi_app` when
+        handling a request. Use :meth:`test_request_context` to create
+        an environment and context instead of this method.
+
+        :param environ: a WSGI environment
+        """
+        # Each call creates a fresh, not-yet-pushed context.
+        return RequestContext(self, environ)
+
+    def test_request_context(self, *args, **kwargs):
+        """Create a :class:`~flask.ctx.RequestContext` for a WSGI
+        environment created from the given values. This is mostly useful
+        during testing, where you may want to run a function that uses
+        request data without dispatching a full request.
+
+        See :doc:`/reqcontext`.
+
+        Use a ``with`` block to push the context, which will make
+        :data:`request` point at the request for the created
+        environment. ::
+
+            with test_request_context(...):
+                generate_report()
+
+        When using the shell, it may be easier to push and pop the
+        context manually to avoid indentation. ::
+
+            ctx = app.test_request_context(...)
+            ctx.push()
+            ...
+            ctx.pop()
+
+        Takes the same arguments as Werkzeug's
+        :class:`~werkzeug.test.EnvironBuilder`, with some defaults from
+        the application. See the linked Werkzeug docs for most of the
+        available arguments. Flask-specific behavior is listed here.
+
+        :param path: URL path being requested.
+        :param base_url: Base URL where the app is being served, which
+            ``path`` is relative to. If not given, built from
+            :data:`PREFERRED_URL_SCHEME`, ``subdomain``,
+            :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`.
+        :param subdomain: Subdomain name to append to
+            :data:`SERVER_NAME`.
+        :param url_scheme: Scheme to use instead of
+            :data:`PREFERRED_URL_SCHEME`.
+        :param data: The request body, either as a string or a dict of
+            form keys and values.
+        :param json: If given, this is serialized as JSON and passed as
+            ``data``. Also defaults ``content_type`` to
+            ``application/json``.
+        :param args: other positional arguments passed to
+            :class:`~werkzeug.test.EnvironBuilder`.
+        :param kwargs: other keyword arguments passed to
+            :class:`~werkzeug.test.EnvironBuilder`.
+        """
+        from .testing import EnvironBuilder
+
+        builder = EnvironBuilder(self, *args, **kwargs)
+
+        try:
+            return self.request_context(builder.get_environ())
+        finally:
+            # Always close the builder, even if building the environ raised.
+            builder.close()
+
+    def wsgi_app(self, environ, start_response):
+        """The actual WSGI application. This is not implemented in
+        :meth:`__call__` so that middlewares can be applied without
+        losing a reference to the app object. Instead of doing this::
+
+            app = MyMiddleware(app)
+
+        It's a better idea to do this instead::
+
+            app.wsgi_app = MyMiddleware(app.wsgi_app)
+
+        Then you still have the original application object around and
+        can continue to call methods on it.
+
+        .. versionchanged:: 0.7
+            Teardown events for the request and app contexts are called
+            even if an unhandled error occurs. Other events may not be
+            called depending on when an error occurs during dispatch.
+            See :ref:`callbacks-and-errors`.
+
+        :param environ: A WSGI environment.
+        :param start_response: A callable accepting a status code,
+            a list of headers, and an optional exception context to
+            start the response.
+        """
+        ctx = self.request_context(environ)
+        # Tracks the exception (if any) so teardown functions receive it.
+        error = None
+        try:
+            try:
+                ctx.push()
+                response = self.full_dispatch_request()
+            except Exception as e:
+                error = e
+                response = self.handle_exception(e)
+            except:  # noqa: B001
+                # Non-Exception BaseExceptions: record for teardown, re-raise.
+                error = sys.exc_info()[1]
+                raise
+            return response(environ, start_response)
+        finally:
+            # Teardown always runs; suppress the error if configured to.
+            if self.should_ignore_error(error):
+                error = None
+            ctx.auto_pop(error)
+
+    def __call__(self, environ, start_response):
+        """The WSGI server calls the Flask application object as the
+        WSGI application. This calls :meth:`wsgi_app` which can be
+        wrapped to apply middleware."""
+        return self.wsgi_app(environ, start_response)
+
+    def __repr__(self):
+        # e.g. <Flask 'myapp'>
+        return "<%s %r>" % (self.__class__.__name__, self.name)
diff --git a/matteo_env/Lib/site-packages/flask/blueprints.py b/matteo_env/Lib/site-packages/flask/blueprints.py
new file mode 100644
index 0000000..8978104
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/blueprints.py
@@ -0,0 +1,569 @@
+# -*- coding: utf-8 -*-
+"""
+ flask.blueprints
+ ~~~~~~~~~~~~~~~~
+
+ Blueprints are the recommended way to implement larger or more
+ pluggable applications in Flask 0.7 and later.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+from functools import update_wrapper
+
+from .helpers import _endpoint_from_view_func
+from .helpers import _PackageBoundObject
+
+# a singleton sentinel value for parameter defaults
+_sentinel = object()
+
+
+class BlueprintSetupState(object):
+    """Temporary holder object for registering a blueprint with the
+    application. An instance of this class is created by the
+    :meth:`~flask.Blueprint.make_setup_state` method and later passed
+    to all register callback functions.
+    """
+
+    def __init__(self, blueprint, app, options, first_registration):
+        #: a reference to the current application
+        self.app = app
+
+        #: a reference to the blueprint that created this setup state.
+        self.blueprint = blueprint
+
+        #: a dictionary with all options that were passed to the
+        #: :meth:`~flask.Flask.register_blueprint` method.
+        self.options = options
+
+        #: as blueprints can be registered multiple times with the
+        #: application and not everything wants to be registered
+        #: multiple times on it, this attribute can be used to figure
+        #: out if the blueprint was registered in the past already.
+        self.first_registration = first_registration
+
+        # Per-registration options take precedence over the blueprint's
+        # own defaults for subdomain and url_prefix.
+        subdomain = self.options.get("subdomain")
+        if subdomain is None:
+            subdomain = self.blueprint.subdomain
+
+        #: The subdomain that the blueprint should be active for, ``None``
+        #: otherwise.
+        self.subdomain = subdomain
+
+        url_prefix = self.options.get("url_prefix")
+        if url_prefix is None:
+            url_prefix = self.blueprint.url_prefix
+        #: The prefix that should be used for all URLs defined on the
+        #: blueprint.
+        self.url_prefix = url_prefix
+
+        #: A dictionary with URL defaults that is added to each and every
+        #: URL that was defined with the blueprint.
+        self.url_defaults = dict(self.blueprint.url_values_defaults)
+        self.url_defaults.update(self.options.get("url_defaults", ()))
+
+    def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
+        """A helper method to register a rule (and optionally a view function)
+        to the application. The endpoint is automatically prefixed with the
+        blueprint's name.
+        """
+        if self.url_prefix is not None:
+            if rule:
+                # Join prefix and rule with exactly one slash between them.
+                rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/")))
+            else:
+                rule = self.url_prefix
+        options.setdefault("subdomain", self.subdomain)
+        if endpoint is None:
+            endpoint = _endpoint_from_view_func(view_func)
+        defaults = self.url_defaults
+        if "defaults" in options:
+            defaults = dict(defaults, **options.pop("defaults"))
+        # Endpoint is namespaced as "blueprint_name.endpoint".
+        self.app.add_url_rule(
+            rule,
+            "%s.%s" % (self.blueprint.name, endpoint),
+            view_func,
+            defaults=defaults,
+            **options
+        )
+
+
+class Blueprint(_PackageBoundObject):
+    """Represents a blueprint, a collection of routes and other
+    app-related functions that can be registered on a real application
+    later.
+
+    A blueprint is an object that allows defining application functions
+    without requiring an application object ahead of time. It uses the
+    same decorators as :class:`~flask.Flask`, but defers the need for an
+    application by recording them for later registration.
+
+    Decorating a function with a blueprint creates a deferred function
+    that is called with :class:`~flask.blueprints.BlueprintSetupState`
+    when the blueprint is registered on an application.
+
+    See :ref:`blueprints` for more information.
+
+    .. versionchanged:: 1.1.0
+        Blueprints have a ``cli`` group to register nested CLI commands.
+        The ``cli_group`` parameter controls the name of the group under
+        the ``flask`` command.
+
+    .. versionadded:: 0.7
+
+    :param name: The name of the blueprint. Will be prepended to each
+        endpoint name.
+    :param import_name: The name of the blueprint package, usually
+        ``__name__``. This helps locate the ``root_path`` for the
+        blueprint.
+    :param static_folder: A folder with static files that should be
+        served by the blueprint's static route. The path is relative to
+        the blueprint's root path. Blueprint static files are disabled
+        by default.
+    :param static_url_path: The url to serve static files from.
+        Defaults to ``static_folder``. If the blueprint does not have
+        a ``url_prefix``, the app's static route will take precedence,
+        and the blueprint's static files won't be accessible.
+    :param template_folder: A folder with templates that should be added
+        to the app's template search path. The path is relative to the
+        blueprint's root path. Blueprint templates are disabled by
+        default. Blueprint templates have a lower precedence than those
+        in the app's templates folder.
+    :param url_prefix: A path to prepend to all of the blueprint's URLs,
+        to make them distinct from the rest of the app's routes.
+    :param subdomain: A subdomain that blueprint routes will match on by
+        default.
+    :param url_defaults: A dict of default values that blueprint routes
+        will receive by default.
+    :param root_path: By default, the blueprint will automatically detect
+        this based on ``import_name``. In certain situations this automatic
+        detection can fail, so the path can be specified manually
+        instead.
+    """
+
+    warn_on_modifications = False
+    _got_registered_once = False
+
+    #: Blueprint local JSON encoder class to use.
+    #: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_encoder`.
+    json_encoder = None
+    #: Blueprint local JSON decoder class to use.
+    #: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_decoder`.
+    json_decoder = None
+
+    # TODO remove the next three attrs when Sphinx :inherited-members: works
+    # https://github.com/sphinx-doc/sphinx/issues/741
+
+    #: The name of the package or module that this app belongs to. Do not
+    #: change this once it is set by the constructor.
+    import_name = None
+
+    #: Location of the template files to be added to the template lookup.
+    #: ``None`` if templates should not be added.
+    template_folder = None
+
+    #: Absolute path to the package on the filesystem. Used to look up
+    #: resources contained in the package.
+    root_path = None
+
+    def __init__(
+        self,
+        name,
+        import_name,
+        static_folder=None,
+        static_url_path=None,
+        template_folder=None,
+        url_prefix=None,
+        subdomain=None,
+        url_defaults=None,
+        root_path=None,
+        cli_group=_sentinel,
+    ):
+        _PackageBoundObject.__init__(
+            self, import_name, template_folder, root_path=root_path
+        )
+        self.name = name
+        self.url_prefix = url_prefix
+        self.subdomain = subdomain
+        self.static_folder = static_folder
+        self.static_url_path = static_url_path
+        # Callbacks queued via record()/record_once(), executed when the
+        # blueprint is registered on an application.
+        self.deferred_functions = []
+        if url_defaults is None:
+            url_defaults = {}
+        self.url_values_defaults = url_defaults
+        # _sentinel default means "use the blueprint's name as CLI group".
+        self.cli_group = cli_group
+
+    def record(self, func):
+        """Registers a function that is called when the blueprint is
+        registered on the application. This function is called with the
+        state as argument as returned by the :meth:`make_setup_state`
+        method.
+        """
+        # Deferred functions queued after registration will never run for
+        # the already-registered application, hence the warning.
+        if self._got_registered_once and self.warn_on_modifications:
+            from warnings import warn
+
+            warn(
+                Warning(
+                    "The blueprint was already registered once "
+                    "but is getting modified now. These changes "
+                    "will not show up."
+                )
+            )
+        self.deferred_functions.append(func)
+
+    def record_once(self, func):
+        """Works like :meth:`record` but wraps the function in another
+        function that will ensure the function is only called once. If the
+        blueprint is registered a second time on the application, the
+        function passed is not called.
+        """
+
+        def wrapper(state):
+            # Only fire on the first registration of this blueprint.
+            if state.first_registration:
+                func(state)
+
+        return self.record(update_wrapper(wrapper, func))
+
+    def make_setup_state(self, app, options, first_registration=False):
+        """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState`
+        object that is later passed to the register callback functions.
+        Subclasses can override this to return a subclass of the setup state.
+        """
+        return BlueprintSetupState(self, app, options, first_registration)
+
+    def register(self, app, options, first_registration=False):
+        """Called by :meth:`Flask.register_blueprint` to register all views
+        and callbacks registered on the blueprint with the application. Creates
+        a :class:`.BlueprintSetupState` and calls each :meth:`record` callback
+        with it.
+
+        :param app: The application this blueprint is being registered with.
+        :param options: Keyword arguments forwarded from
+            :meth:`~Flask.register_blueprint`.
+        :param first_registration: Whether this is the first time this
+            blueprint has been registered on the application.
+        """
+        self._got_registered_once = True
+        state = self.make_setup_state(app, options, first_registration)
+
+        # The blueprint's static route is only added when a static folder
+        # was configured.
+        if self.has_static_folder:
+            state.add_url_rule(
+                self.static_url_path + "/",
+                view_func=self.send_static_file,
+                endpoint="static",
+            )
+
+        # Replay all deferred registrations (routes, handlers, filters, ...).
+        for deferred in self.deferred_functions:
+            deferred(state)
+
+        # Per-registration cli_group overrides the blueprint's default.
+        cli_resolved_group = options.get("cli_group", self.cli_group)
+
+        if not self.cli.commands:
+            return
+
+        if cli_resolved_group is None:
+            # Merge commands directly into the app's top-level CLI.
+            app.cli.commands.update(self.cli.commands)
+        elif cli_resolved_group is _sentinel:
+            # Default: nest commands under a group named after the blueprint.
+            self.cli.name = self.name
+            app.cli.add_command(self.cli)
+        else:
+            # Explicit group name.
+            self.cli.name = cli_resolved_group
+            app.cli.add_command(self.cli)
+
+    def route(self, rule, **options):
+        """Like :meth:`Flask.route` but for a blueprint. The endpoint for the
+        :func:`url_for` function is prefixed with the name of the blueprint.
+        """
+
+        def decorator(f):
+            # Registration is deferred until the blueprint is registered.
+            endpoint = options.pop("endpoint", f.__name__)
+            self.add_url_rule(rule, endpoint, f, **options)
+            return f
+
+        return decorator
+
+    def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
+        """Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for
+        the :func:`url_for` function is prefixed with the name of the blueprint.
+        """
+        # Dots are reserved for the "blueprint.endpoint" namespace.
+        if endpoint:
+            assert "." not in endpoint, "Blueprint endpoints should not contain dots"
+        if view_func and hasattr(view_func, "__name__"):
+            assert (
+                "." not in view_func.__name__
+            ), "Blueprint view function name should not contain dots"
+        self.record(lambda s: s.add_url_rule(rule, endpoint, view_func, **options))
+
+    def endpoint(self, endpoint):
+        """Like :meth:`Flask.endpoint` but for a blueprint. This does not
+        prefix the endpoint with the blueprint name, this has to be done
+        explicitly by the user of this method. If the endpoint is prefixed
+        with a `.` it will be registered to the current blueprint, otherwise
+        it's an application independent endpoint.
+        """
+
+        def decorator(f):
+            def register_endpoint(state):
+                # Bind the view under the given (unprefixed) endpoint name.
+                state.app.view_functions[endpoint] = f
+
+            self.record_once(register_endpoint)
+            return f
+
+        return decorator
+
+    def app_template_filter(self, name=None):
+        """Register a custom template filter, available application wide. Like
+        :meth:`Flask.template_filter` but for a blueprint.
+
+        :param name: the optional name of the filter, otherwise the
+                     function name will be used.
+        """
+
+        def decorator(f):
+            self.add_app_template_filter(f, name=name)
+            return f
+
+        return decorator
+
+    def add_app_template_filter(self, f, name=None):
+        """Register a custom template filter, available application wide. Like
+        :meth:`Flask.add_template_filter` but for a blueprint. Works exactly
+        like the :meth:`app_template_filter` decorator.
+
+        :param name: the optional name of the filter, otherwise the
+                     function name will be used.
+        """
+
+        def register_template(state):
+            # Deferred: installed on the app's Jinja env at registration time.
+            state.app.jinja_env.filters[name or f.__name__] = f
+
+        self.record_once(register_template)
+
+    def app_template_test(self, name=None):
+        """Register a custom template test, available application wide. Like
+        :meth:`Flask.template_test` but for a blueprint.
+
+        .. versionadded:: 0.10
+
+        :param name: the optional name of the test, otherwise the
+                     function name will be used.
+        """
+
+        def decorator(f):
+            self.add_app_template_test(f, name=name)
+            return f
+
+        return decorator
+
+    def add_app_template_test(self, f, name=None):
+        """Register a custom template test, available application wide. Like
+        :meth:`Flask.add_template_test` but for a blueprint. Works exactly
+        like the :meth:`app_template_test` decorator.
+
+        .. versionadded:: 0.10
+
+        :param name: the optional name of the test, otherwise the
+                     function name will be used.
+        """
+
+        def register_template(state):
+            # Deferred: installed on the app's Jinja env at registration time.
+            state.app.jinja_env.tests[name or f.__name__] = f
+
+        self.record_once(register_template)
+
+ def app_template_global(self, name=None):
+ """Register a custom template global, available application wide. Like
+ :meth:`Flask.template_global` but for a blueprint.
+
+ .. versionadded:: 0.10
+
+ :param name: the optional name of the global, otherwise the
+ function name will be used.
+ """
+
+ def decorator(f):
+ self.add_app_template_global(f, name=name)
+ return f
+
+ return decorator
+
+ def add_app_template_global(self, f, name=None):
+ """Register a custom template global, available application wide. Like
+ :meth:`Flask.add_template_global` but for a blueprint. Works exactly
+ like the :meth:`app_template_global` decorator.
+
+ .. versionadded:: 0.10
+
+ :param name: the optional name of the global, otherwise the
+ function name will be used.
+ """
+
+ def register_template(state):
+ state.app.jinja_env.globals[name or f.__name__] = f
+
+ self.record_once(register_template)
+
+ def before_request(self, f):
+ """Like :meth:`Flask.before_request` but for a blueprint. This function
+ is only executed before each request that is handled by a function of
+ that blueprint.
+ """
+ self.record_once(
+ lambda s: s.app.before_request_funcs.setdefault(self.name, []).append(f)
+ )
+ return f
+
+ def before_app_request(self, f):
+ """Like :meth:`Flask.before_request`. Such a function is executed
+ before each request, even if outside of a blueprint.
+ """
+ self.record_once(
+ lambda s: s.app.before_request_funcs.setdefault(None, []).append(f)
+ )
+ return f
+
+ def before_app_first_request(self, f):
+ """Like :meth:`Flask.before_first_request`. Such a function is
+ executed before the first request to the application.
+ """
+ self.record_once(lambda s: s.app.before_first_request_funcs.append(f))
+ return f
+
+ def after_request(self, f):
+ """Like :meth:`Flask.after_request` but for a blueprint. This function
+ is only executed after each request that is handled by a function of
+ that blueprint.
+ """
+ self.record_once(
+ lambda s: s.app.after_request_funcs.setdefault(self.name, []).append(f)
+ )
+ return f
+
+ def after_app_request(self, f):
+ """Like :meth:`Flask.after_request` but for a blueprint. Such a function
+ is executed after each request, even if outside of the blueprint.
+ """
+ self.record_once(
+ lambda s: s.app.after_request_funcs.setdefault(None, []).append(f)
+ )
+ return f
+
+ def teardown_request(self, f):
+ """Like :meth:`Flask.teardown_request` but for a blueprint. This
+ function is only executed when tearing down requests handled by a
+ function of that blueprint. Teardown request functions are executed
+ when the request context is popped, even when no actual request was
+ performed.
+ """
+ self.record_once(
+ lambda s: s.app.teardown_request_funcs.setdefault(self.name, []).append(f)
+ )
+ return f
+
+ def teardown_app_request(self, f):
+ """Like :meth:`Flask.teardown_request` but for a blueprint. Such a
+ function is executed when tearing down each request, even if outside of
+ the blueprint.
+ """
+ self.record_once(
+ lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f)
+ )
+ return f
+
+ def context_processor(self, f):
+ """Like :meth:`Flask.context_processor` but for a blueprint. This
+ function is only executed for requests handled by a blueprint.
+ """
+ self.record_once(
+ lambda s: s.app.template_context_processors.setdefault(
+ self.name, []
+ ).append(f)
+ )
+ return f
+
+ def app_context_processor(self, f):
+ """Like :meth:`Flask.context_processor` but for a blueprint. Such a
+ function is executed each request, even if outside of the blueprint.
+ """
+ self.record_once(
+ lambda s: s.app.template_context_processors.setdefault(None, []).append(f)
+ )
+ return f
+
+ def app_errorhandler(self, code):
+ """Like :meth:`Flask.errorhandler` but for a blueprint. This
+ handler is used for all requests, even if outside of the blueprint.
+ """
+
+ def decorator(f):
+ self.record_once(lambda s: s.app.errorhandler(code)(f))
+ return f
+
+ return decorator
+
+ def url_value_preprocessor(self, f):
+ """Registers a function as URL value preprocessor for this
+ blueprint. It's called before the view functions are called and
+ can modify the url values provided.
+ """
+ self.record_once(
+ lambda s: s.app.url_value_preprocessors.setdefault(self.name, []).append(f)
+ )
+ return f
+
+ def url_defaults(self, f):
+ """Callback function for URL defaults for this blueprint. It's called
+ with the endpoint and values and should update the values passed
+ in place.
+ """
+ self.record_once(
+ lambda s: s.app.url_default_functions.setdefault(self.name, []).append(f)
+ )
+ return f
+
+ def app_url_value_preprocessor(self, f):
+ """Same as :meth:`url_value_preprocessor` but application wide.
+ """
+ self.record_once(
+ lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f)
+ )
+ return f
+
+ def app_url_defaults(self, f):
+ """Same as :meth:`url_defaults` but application wide.
+ """
+ self.record_once(
+ lambda s: s.app.url_default_functions.setdefault(None, []).append(f)
+ )
+ return f
+
+ def errorhandler(self, code_or_exception):
+ """Registers an error handler that becomes active for this blueprint
+ only. Please be aware that routing does not happen local to a
+ blueprint so an error handler for 404 usually is not handled by
+ a blueprint unless it is caused inside a view function. Another
+ special case is the 500 internal server error which is always looked
+ up from the application.
+
+ Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator
+ of the :class:`~flask.Flask` object.
+ """
+
+ def decorator(f):
+ self.record_once(
+ lambda s: s.app._register_error_handler(self.name, code_or_exception, f)
+ )
+ return f
+
+ return decorator
+
+ def register_error_handler(self, code_or_exception, f):
+ """Non-decorator version of the :meth:`errorhandler` error attach
+ function, akin to the :meth:`~flask.Flask.register_error_handler`
+ application-wide function of the :class:`~flask.Flask` object but
+ for error handlers limited to this blueprint.
+
+ .. versionadded:: 0.11
+ """
+ self.record_once(
+ lambda s: s.app._register_error_handler(self.name, code_or_exception, f)
+ )
diff --git a/matteo_env/Lib/site-packages/flask/cli.py b/matteo_env/Lib/site-packages/flask/cli.py
new file mode 100644
index 0000000..c09b2cd
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/cli.py
@@ -0,0 +1,971 @@
+# -*- coding: utf-8 -*-
+"""
+ flask.cli
+ ~~~~~~~~~
+
+ A simple command line application to run flask apps.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+from __future__ import print_function
+
+import ast
+import inspect
+import os
+import platform
+import re
+import sys
+import traceback
+from functools import update_wrapper
+from operator import attrgetter
+from threading import Lock
+from threading import Thread
+
+import click
+from werkzeug.utils import import_string
+
+from ._compat import getargspec
+from ._compat import itervalues
+from ._compat import reraise
+from ._compat import text_type
+from .globals import current_app
+from .helpers import get_debug_flag
+from .helpers import get_env
+from .helpers import get_load_dotenv
+
+try:
+ import dotenv
+except ImportError:
+ dotenv = None
+
+try:
+ import ssl
+except ImportError:
+ ssl = None
+
+
+class NoAppException(click.UsageError):
+ """Raised if an application cannot be found or loaded."""
+
+
+def find_best_app(script_info, module):
+ """Given a module instance this tries to find the best possible
+ application in the module or raises an exception.
+ """
+ from . import Flask
+
+ # Search for the most common names first.
+ for attr_name in ("app", "application"):
+ app = getattr(module, attr_name, None)
+
+ if isinstance(app, Flask):
+ return app
+
+ # Otherwise find the only object that is a Flask instance.
+ matches = [v for v in itervalues(module.__dict__) if isinstance(v, Flask)]
+
+ if len(matches) == 1:
+ return matches[0]
+ elif len(matches) > 1:
+ raise NoAppException(
+ 'Detected multiple Flask applications in module "{module}". Use '
+ '"FLASK_APP={module}:name" to specify the correct '
+ "one.".format(module=module.__name__)
+ )
+
+ # Search for app factory functions.
+ for attr_name in ("create_app", "make_app"):
+ app_factory = getattr(module, attr_name, None)
+
+ if inspect.isfunction(app_factory):
+ try:
+ app = call_factory(script_info, app_factory)
+
+ if isinstance(app, Flask):
+ return app
+ except TypeError:
+ if not _called_with_wrong_args(app_factory):
+ raise
+ raise NoAppException(
+ 'Detected factory "{factory}" in module "{module}", but '
+ "could not call it without arguments. Use "
+ "\"FLASK_APP='{module}:{factory}(args)'\" to specify "
+ "arguments.".format(factory=attr_name, module=module.__name__)
+ )
+
+ raise NoAppException(
+ 'Failed to find Flask application or factory in module "{module}". '
+ 'Use "FLASK_APP={module}:name to specify one.'.format(module=module.__name__)
+ )
+
+
+def call_factory(script_info, app_factory, arguments=()):
+ """Takes an app factory, a ``script_info`` object and optionally a tuple
+ of arguments. Checks for the existence of a script_info argument and calls
+ the app_factory depending on that and the arguments provided.
+ """
+ args_spec = getargspec(app_factory)
+ arg_names = args_spec.args
+ arg_defaults = args_spec.defaults
+
+ if "script_info" in arg_names:
+ return app_factory(*arguments, script_info=script_info)
+ elif arguments:
+ return app_factory(*arguments)
+ elif not arguments and len(arg_names) == 1 and arg_defaults is None:
+ return app_factory(script_info)
+
+ return app_factory()
+
+
+def _called_with_wrong_args(factory):
+ """Check whether calling a function raised a ``TypeError`` because
+ the call failed or because something in the factory raised the
+ error.
+
+ :param factory: the factory function that was called
+ :return: true if the call failed
+ """
+ tb = sys.exc_info()[2]
+
+ try:
+ while tb is not None:
+ if tb.tb_frame.f_code is factory.__code__:
+ # in the factory, it was called successfully
+ return False
+
+ tb = tb.tb_next
+
+ # didn't reach the factory
+ return True
+ finally:
+ # explicitly delete tb as it is circular referenced
+ # https://docs.python.org/2/library/sys.html#sys.exc_info
+ del tb
+
+
+def find_app_by_string(script_info, module, app_name):
+ """Checks if the given string is a variable name or a function. If it is a
+ function, it checks for specified arguments and whether it takes a
+ ``script_info`` argument and calls the function with the appropriate
+ arguments.
+ """
+ from . import Flask
+
+ match = re.match(r"^ *([^ ()]+) *(?:\((.*?) *,? *\))? *$", app_name)
+
+ if not match:
+ raise NoAppException(
+ '"{name}" is not a valid variable name or function '
+ "expression.".format(name=app_name)
+ )
+
+ name, args = match.groups()
+
+ try:
+ attr = getattr(module, name)
+ except AttributeError as e:
+ raise NoAppException(e.args[0])
+
+ if inspect.isfunction(attr):
+ if args:
+ try:
+ args = ast.literal_eval("({args},)".format(args=args))
+ except (ValueError, SyntaxError) as e:
+ raise NoAppException(
+ "Could not parse the arguments in "
+ '"{app_name}".'.format(e=e, app_name=app_name)
+ )
+ else:
+ args = ()
+
+ try:
+ app = call_factory(script_info, attr, args)
+ except TypeError as e:
+ if not _called_with_wrong_args(attr):
+ raise
+
+ raise NoAppException(
+ '{e}\nThe factory "{app_name}" in module "{module}" could not '
+ "be called with the specified arguments.".format(
+ e=e, app_name=app_name, module=module.__name__
+ )
+ )
+ else:
+ app = attr
+
+ if isinstance(app, Flask):
+ return app
+
+ raise NoAppException(
+ "A valid Flask application was not obtained from "
+ '"{module}:{app_name}".'.format(module=module.__name__, app_name=app_name)
+ )
+
+
+def prepare_import(path):
+ """Given a filename this will try to calculate the python path, add it
+ to the search path and return the actual module name that is expected.
+ """
+ path = os.path.realpath(path)
+
+ fname, ext = os.path.splitext(path)
+ if ext == ".py":
+ path = fname
+
+ if os.path.basename(path) == "__init__":
+ path = os.path.dirname(path)
+
+ module_name = []
+
+ # move up until outside package structure (no __init__.py)
+ while True:
+ path, name = os.path.split(path)
+ module_name.append(name)
+
+ if not os.path.exists(os.path.join(path, "__init__.py")):
+ break
+
+ if sys.path[0] != path:
+ sys.path.insert(0, path)
+
+ return ".".join(module_name[::-1])
+
+
+def locate_app(script_info, module_name, app_name, raise_if_not_found=True):
+ __traceback_hide__ = True # noqa: F841
+
+ try:
+ __import__(module_name)
+ except ImportError:
+ # Reraise the ImportError if it occurred within the imported module.
+ # Determine this by checking whether the trace has a depth > 1.
+ if sys.exc_info()[-1].tb_next:
+ raise NoAppException(
+ 'While importing "{name}", an ImportError was raised:'
+ "\n\n{tb}".format(name=module_name, tb=traceback.format_exc())
+ )
+ elif raise_if_not_found:
+ raise NoAppException('Could not import "{name}".'.format(name=module_name))
+ else:
+ return
+
+ module = sys.modules[module_name]
+
+ if app_name is None:
+ return find_best_app(script_info, module)
+ else:
+ return find_app_by_string(script_info, module, app_name)
+
+
+def get_version(ctx, param, value):
+ if not value or ctx.resilient_parsing:
+ return
+
+ import werkzeug
+ from . import __version__
+
+ message = "Python %(python)s\nFlask %(flask)s\nWerkzeug %(werkzeug)s"
+ click.echo(
+ message
+ % {
+ "python": platform.python_version(),
+ "flask": __version__,
+ "werkzeug": werkzeug.__version__,
+ },
+ color=ctx.color,
+ )
+ ctx.exit()
+
+
+version_option = click.Option(
+ ["--version"],
+ help="Show the flask version",
+ expose_value=False,
+ callback=get_version,
+ is_flag=True,
+ is_eager=True,
+)
+
+
+class DispatchingApp(object):
+ """Special application that dispatches to a Flask application which
+ is imported by name in a background thread. If an error happens
+ it is recorded and shown as part of the WSGI handling which in case
+ of the Werkzeug debugger means that it shows up in the browser.
+ """
+
+ def __init__(self, loader, use_eager_loading=False):
+ self.loader = loader
+ self._app = None
+ self._lock = Lock()
+ self._bg_loading_exc_info = None
+ if use_eager_loading:
+ self._load_unlocked()
+ else:
+ self._load_in_background()
+
+ def _load_in_background(self):
+ def _load_app():
+ __traceback_hide__ = True # noqa: F841
+ with self._lock:
+ try:
+ self._load_unlocked()
+ except Exception:
+ self._bg_loading_exc_info = sys.exc_info()
+
+ t = Thread(target=_load_app, args=())
+ t.start()
+
+ def _flush_bg_loading_exception(self):
+ __traceback_hide__ = True # noqa: F841
+ exc_info = self._bg_loading_exc_info
+ if exc_info is not None:
+ self._bg_loading_exc_info = None
+ reraise(*exc_info)
+
+ def _load_unlocked(self):
+ __traceback_hide__ = True # noqa: F841
+ self._app = rv = self.loader()
+ self._bg_loading_exc_info = None
+ return rv
+
+ def __call__(self, environ, start_response):
+ __traceback_hide__ = True # noqa: F841
+ if self._app is not None:
+ return self._app(environ, start_response)
+ self._flush_bg_loading_exception()
+ with self._lock:
+ if self._app is not None:
+ rv = self._app
+ else:
+ rv = self._load_unlocked()
+ return rv(environ, start_response)
+
+
+class ScriptInfo(object):
+ """Helper object to deal with Flask applications. This is usually not
+ necessary to interface with as it's used internally in the dispatching
+ to click. In future versions of Flask this object will most likely play
+ a bigger role. Typically it's created automatically by the
+ :class:`FlaskGroup` but you can also manually create it and pass it
+ onwards as click object.
+ """
+
+ def __init__(self, app_import_path=None, create_app=None, set_debug_flag=True):
+ #: Optionally the import path for the Flask application.
+ self.app_import_path = app_import_path or os.environ.get("FLASK_APP")
+ #: Optionally a function that is passed the script info to create
+ #: the instance of the application.
+ self.create_app = create_app
+ #: A dictionary with arbitrary data that can be associated with
+ #: this script info.
+ self.data = {}
+ self.set_debug_flag = set_debug_flag
+ self._loaded_app = None
+
+ def load_app(self):
+ """Loads the Flask app (if not yet loaded) and returns it. Calling
+ this multiple times will just result in the already loaded app to
+ be returned.
+ """
+ __traceback_hide__ = True # noqa: F841
+
+ if self._loaded_app is not None:
+ return self._loaded_app
+
+ app = None
+
+ if self.create_app is not None:
+ app = call_factory(self, self.create_app)
+ else:
+ if self.app_import_path:
+ path, name = (
+ re.split(r":(?![\\/])", self.app_import_path, 1) + [None]
+ )[:2]
+ import_name = prepare_import(path)
+ app = locate_app(self, import_name, name)
+ else:
+ for path in ("wsgi.py", "app.py"):
+ import_name = prepare_import(path)
+ app = locate_app(self, import_name, None, raise_if_not_found=False)
+
+ if app:
+ break
+
+ if not app:
+ raise NoAppException(
+ "Could not locate a Flask application. You did not provide "
+ 'the "FLASK_APP" environment variable, and a "wsgi.py" or '
+ '"app.py" module was not found in the current directory.'
+ )
+
+ if self.set_debug_flag:
+ # Update the app's debug flag through the descriptor so that
+ # other values repopulate as well.
+ app.debug = get_debug_flag()
+
+ self._loaded_app = app
+ return app
+
+
+pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True)
+
+
+def with_appcontext(f):
+ """Wraps a callback so that it's guaranteed to be executed with the
+ script's application context. If callbacks are registered directly
+ to the ``app.cli`` object then they are wrapped with this function
+ by default unless it's disabled.
+ """
+
+ @click.pass_context
+ def decorator(__ctx, *args, **kwargs):
+ with __ctx.ensure_object(ScriptInfo).load_app().app_context():
+ return __ctx.invoke(f, *args, **kwargs)
+
+ return update_wrapper(decorator, f)
+
+
+class AppGroup(click.Group):
+ """This works similar to a regular click :class:`~click.Group` but it
+ changes the behavior of the :meth:`command` decorator so that it
+ automatically wraps the functions in :func:`with_appcontext`.
+
+ Not to be confused with :class:`FlaskGroup`.
+ """
+
+ def command(self, *args, **kwargs):
+ """This works exactly like the method of the same name on a regular
+ :class:`click.Group` but it wraps callbacks in :func:`with_appcontext`
+ unless it's disabled by passing ``with_appcontext=False``.
+ """
+ wrap_for_ctx = kwargs.pop("with_appcontext", True)
+
+ def decorator(f):
+ if wrap_for_ctx:
+ f = with_appcontext(f)
+ return click.Group.command(self, *args, **kwargs)(f)
+
+ return decorator
+
+ def group(self, *args, **kwargs):
+ """This works exactly like the method of the same name on a regular
+ :class:`click.Group` but it defaults the group class to
+ :class:`AppGroup`.
+ """
+ kwargs.setdefault("cls", AppGroup)
+ return click.Group.group(self, *args, **kwargs)
+
+
+class FlaskGroup(AppGroup):
+ """Special subclass of the :class:`AppGroup` group that supports
+ loading more commands from the configured Flask app. Normally a
+ developer does not have to interface with this class but there are
+ some very advanced use cases for which it makes sense to create an
+ instance of this.
+
+ For information on why this is useful see :ref:`custom-scripts`.
+
+ :param add_default_commands: if this is True then the default run and
+ shell commands will be added.
+ :param add_version_option: adds the ``--version`` option.
+ :param create_app: an optional callback that is passed the script info and
+ returns the loaded app.
+ :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv`
+ files to set environment variables. Will also change the working
+ directory to the directory containing the first file found.
+ :param set_debug_flag: Set the app's debug flag based on the active
+ environment
+
+ .. versionchanged:: 1.0
+ If installed, python-dotenv will be used to load environment variables
+ from :file:`.env` and :file:`.flaskenv` files.
+ """
+
+ def __init__(
+ self,
+ add_default_commands=True,
+ create_app=None,
+ add_version_option=True,
+ load_dotenv=True,
+ set_debug_flag=True,
+ **extra
+ ):
+ params = list(extra.pop("params", None) or ())
+
+ if add_version_option:
+ params.append(version_option)
+
+ AppGroup.__init__(self, params=params, **extra)
+ self.create_app = create_app
+ self.load_dotenv = load_dotenv
+ self.set_debug_flag = set_debug_flag
+
+ if add_default_commands:
+ self.add_command(run_command)
+ self.add_command(shell_command)
+ self.add_command(routes_command)
+
+ self._loaded_plugin_commands = False
+
+ def _load_plugin_commands(self):
+ if self._loaded_plugin_commands:
+ return
+ try:
+ import pkg_resources
+ except ImportError:
+ self._loaded_plugin_commands = True
+ return
+
+ for ep in pkg_resources.iter_entry_points("flask.commands"):
+ self.add_command(ep.load(), ep.name)
+ self._loaded_plugin_commands = True
+
+ def get_command(self, ctx, name):
+ self._load_plugin_commands()
+
+ # We load built-in commands first as these should always be the
+ # same no matter what the app does. If the app does want to
+ # override this it needs to make a custom instance of this group
+ # and not attach the default commands.
+ #
+ # This also means that the script stays functional in case the
+ # application completely fails.
+ rv = AppGroup.get_command(self, ctx, name)
+ if rv is not None:
+ return rv
+
+ info = ctx.ensure_object(ScriptInfo)
+ try:
+ rv = info.load_app().cli.get_command(ctx, name)
+ if rv is not None:
+ return rv
+ except NoAppException:
+ pass
+
+ def list_commands(self, ctx):
+ self._load_plugin_commands()
+
+ # The commands available is the list of both the application (if
+ # available) plus the builtin commands.
+ rv = set(click.Group.list_commands(self, ctx))
+ info = ctx.ensure_object(ScriptInfo)
+ try:
+ rv.update(info.load_app().cli.list_commands(ctx))
+ except Exception:
+ # Here we intentionally swallow all exceptions as we don't
+ # want the help page to break if the app does not exist.
+ # If someone attempts to use the command we try to create
+ # the app again and this will give us the error.
+ # However, we will not do so silently because that would confuse
+ # users.
+ traceback.print_exc()
+ return sorted(rv)
+
+ def main(self, *args, **kwargs):
+ # Set a global flag that indicates that we were invoked from the
+ # command line interface. This is detected by Flask.run to make the
+ # call into a no-op. This is necessary to avoid ugly errors when the
+ # script that is loaded here also attempts to start a server.
+ os.environ["FLASK_RUN_FROM_CLI"] = "true"
+
+ if get_load_dotenv(self.load_dotenv):
+ load_dotenv()
+
+ obj = kwargs.get("obj")
+
+ if obj is None:
+ obj = ScriptInfo(
+ create_app=self.create_app, set_debug_flag=self.set_debug_flag
+ )
+
+ kwargs["obj"] = obj
+ kwargs.setdefault("auto_envvar_prefix", "FLASK")
+ return super(FlaskGroup, self).main(*args, **kwargs)
+
+
+def _path_is_ancestor(path, other):
+ """Take ``other`` and remove the length of ``path`` from it. Then join it
+ to ``path``. If it is the original value, ``path`` is an ancestor of
+ ``other``."""
+ return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other
+
+
+def load_dotenv(path=None):
+ """Load "dotenv" files in order of precedence to set environment variables.
+
+ If an env var is already set it is not overwritten, so earlier files in the
+ list are preferred over later files.
+
+ Changes the current working directory to the location of the first file
+ found, with the assumption that it is in the top level project directory
+ and will be where the Python path should import local packages from.
+
+ This is a no-op if `python-dotenv`_ is not installed.
+
+ .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme
+
+ :param path: Load the file at this location instead of searching.
+ :return: ``True`` if a file was loaded.
+
+ .. versionchanged:: 1.1.0
+ Returns ``False`` when python-dotenv is not installed, or when
+ the given path isn't a file.
+
+ .. versionadded:: 1.0
+ """
+ if dotenv is None:
+ if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"):
+ click.secho(
+ " * Tip: There are .env or .flaskenv files present."
+ ' Do "pip install python-dotenv" to use them.',
+ fg="yellow",
+ err=True,
+ )
+
+ return False
+
+ # if the given path specifies the actual file then return True,
+ # else False
+ if path is not None:
+ if os.path.isfile(path):
+ return dotenv.load_dotenv(path)
+
+ return False
+
+ new_dir = None
+
+ for name in (".env", ".flaskenv"):
+ path = dotenv.find_dotenv(name, usecwd=True)
+
+ if not path:
+ continue
+
+ if new_dir is None:
+ new_dir = os.path.dirname(path)
+
+ dotenv.load_dotenv(path)
+
+ if new_dir and os.getcwd() != new_dir:
+ os.chdir(new_dir)
+
+ return new_dir is not None # at least one file was located and loaded
+
+
+def show_server_banner(env, debug, app_import_path, eager_loading):
+ """Show extra startup messages the first time the server is run,
+ ignoring the reloader.
+ """
+ if os.environ.get("WERKZEUG_RUN_MAIN") == "true":
+ return
+
+ if app_import_path is not None:
+ message = ' * Serving Flask app "{0}"'.format(app_import_path)
+
+ if not eager_loading:
+ message += " (lazy loading)"
+
+ click.echo(message)
+
+ click.echo(" * Environment: {0}".format(env))
+
+ if env == "production":
+ click.secho(
+ " WARNING: This is a development server. "
+ "Do not use it in a production deployment.",
+ fg="red",
+ )
+ click.secho(" Use a production WSGI server instead.", dim=True)
+
+ if debug is not None:
+ click.echo(" * Debug mode: {0}".format("on" if debug else "off"))
+
+
+class CertParamType(click.ParamType):
+ """Click option type for the ``--cert`` option. Allows either an
+ existing file, the string ``'adhoc'``, or an import for a
+ :class:`~ssl.SSLContext` object.
+ """
+
+ name = "path"
+
+ def __init__(self):
+ self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True)
+
+ def convert(self, value, param, ctx):
+ if ssl is None:
+ raise click.BadParameter(
+ 'Using "--cert" requires Python to be compiled with SSL support.',
+ ctx,
+ param,
+ )
+
+ try:
+ return self.path_type(value, param, ctx)
+ except click.BadParameter:
+ value = click.STRING(value, param, ctx).lower()
+
+ if value == "adhoc":
+ try:
+ import OpenSSL # noqa: F401
+ except ImportError:
+ raise click.BadParameter(
+ "Using ad-hoc certificates requires pyOpenSSL.", ctx, param
+ )
+
+ return value
+
+ obj = import_string(value, silent=True)
+
+ if sys.version_info < (2, 7, 9):
+ if obj:
+ return obj
+ else:
+ if isinstance(obj, ssl.SSLContext):
+ return obj
+
+ raise
+
+
+def _validate_key(ctx, param, value):
+ """The ``--key`` option must be specified when ``--cert`` is a file.
+ Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed.
+ """
+ cert = ctx.params.get("cert")
+ is_adhoc = cert == "adhoc"
+
+ if sys.version_info < (2, 7, 9):
+ is_context = cert and not isinstance(cert, (text_type, bytes))
+ else:
+ is_context = isinstance(cert, ssl.SSLContext)
+
+ if value is not None:
+ if is_adhoc:
+ raise click.BadParameter(
+ 'When "--cert" is "adhoc", "--key" is not used.', ctx, param
+ )
+
+ if is_context:
+ raise click.BadParameter(
+ 'When "--cert" is an SSLContext object, "--key is not used.', ctx, param
+ )
+
+ if not cert:
+ raise click.BadParameter('"--cert" must also be specified.', ctx, param)
+
+ ctx.params["cert"] = cert, value
+
+ else:
+ if cert and not (is_adhoc or is_context):
+ raise click.BadParameter('Required when using "--cert".', ctx, param)
+
+ return value
+
+
+class SeparatedPathType(click.Path):
+ """Click option type that accepts a list of values separated by the
+ OS's path separator (``:``, ``;`` on Windows). Each value is
+ validated as a :class:`click.Path` type.
+ """
+
+ def convert(self, value, param, ctx):
+ items = self.split_envvar_value(value)
+ super_convert = super(SeparatedPathType, self).convert
+ return [super_convert(item, param, ctx) for item in items]
+
+
+@click.command("run", short_help="Run a development server.")
+@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.")
+@click.option("--port", "-p", default=5000, help="The port to bind to.")
+@click.option(
+ "--cert", type=CertParamType(), help="Specify a certificate file to use HTTPS."
+)
+@click.option(
+ "--key",
+ type=click.Path(exists=True, dir_okay=False, resolve_path=True),
+ callback=_validate_key,
+ expose_value=False,
+ help="The key file to use when specifying a certificate.",
+)
+@click.option(
+ "--reload/--no-reload",
+ default=None,
+ help="Enable or disable the reloader. By default the reloader "
+ "is active if debug is enabled.",
+)
+@click.option(
+ "--debugger/--no-debugger",
+ default=None,
+ help="Enable or disable the debugger. By default the debugger "
+ "is active if debug is enabled.",
+)
+@click.option(
+ "--eager-loading/--lazy-loader",
+ default=None,
+ help="Enable or disable eager loading. By default eager "
+ "loading is enabled if the reloader is disabled.",
+)
+@click.option(
+ "--with-threads/--without-threads",
+ default=True,
+ help="Enable or disable multithreading.",
+)
+@click.option(
+ "--extra-files",
+ default=None,
+ type=SeparatedPathType(),
+ help=(
+ "Extra files that trigger a reload on change. Multiple paths"
+ " are separated by '{}'.".format(os.path.pathsep)
+ ),
+)
+@pass_script_info
+def run_command(
+ info, host, port, reload, debugger, eager_loading, with_threads, cert, extra_files
+):
+ """Run a local development server.
+
+ This server is for development purposes only. It does not provide
+ the stability, security, or performance of production WSGI servers.
+
+ The reloader and debugger are enabled by default if
+ FLASK_ENV=development or FLASK_DEBUG=1.
+ """
+ debug = get_debug_flag()
+
+ if reload is None:
+ reload = debug
+
+ if debugger is None:
+ debugger = debug
+
+ if eager_loading is None:
+ eager_loading = not reload
+
+ show_server_banner(get_env(), debug, info.app_import_path, eager_loading)
+ app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)
+
+ from werkzeug.serving import run_simple
+
+ run_simple(
+ host,
+ port,
+ app,
+ use_reloader=reload,
+ use_debugger=debugger,
+ threaded=with_threads,
+ ssl_context=cert,
+ extra_files=extra_files,
+ )
+
+
+@click.command("shell", short_help="Run a shell in the app context.")
+@with_appcontext
+def shell_command():
+ """Run an interactive Python shell in the context of a given
+ Flask application. The application will populate the default
+ namespace of this shell according to its configuration.
+
+ This is useful for executing small snippets of management code
+ without having to manually configure the application.
+ """
+ import code
+ from .globals import _app_ctx_stack
+
+ app = _app_ctx_stack.top.app
+ banner = "Python %s on %s\nApp: %s [%s]\nInstance: %s" % (
+ sys.version,
+ sys.platform,
+ app.import_name,
+ app.env,
+ app.instance_path,
+ )
+ ctx = {}
+
+ # Support the regular Python interpreter startup script if someone
+ # is using it.
+ startup = os.environ.get("PYTHONSTARTUP")
+ if startup and os.path.isfile(startup):
+ with open(startup, "r") as f:
+ eval(compile(f.read(), startup, "exec"), ctx)
+
+ ctx.update(app.make_shell_context())
+
+ code.interact(banner=banner, local=ctx)
+
+
+@click.command("routes", short_help="Show the routes for the app.")
+@click.option(
+ "--sort",
+ "-s",
+ type=click.Choice(("endpoint", "methods", "rule", "match")),
+ default="endpoint",
+ help=(
+ 'Method to sort routes by. "match" is the order that Flask will match '
+ "routes when dispatching a request."
+ ),
+)
+@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.")
+@with_appcontext
+def routes_command(sort, all_methods):
+ """Show all registered routes with endpoints and methods."""
+
+ rules = list(current_app.url_map.iter_rules())
+ if not rules:
+ click.echo("No routes were registered.")
+ return
+
+ ignored_methods = set(() if all_methods else ("HEAD", "OPTIONS"))
+
+ if sort in ("endpoint", "rule"):
+ rules = sorted(rules, key=attrgetter(sort))
+ elif sort == "methods":
+ rules = sorted(rules, key=lambda rule: sorted(rule.methods))
+
+ rule_methods = [", ".join(sorted(rule.methods - ignored_methods)) for rule in rules]
+
+ headers = ("Endpoint", "Methods", "Rule")
+ widths = (
+ max(len(rule.endpoint) for rule in rules),
+ max(len(methods) for methods in rule_methods),
+ max(len(rule.rule) for rule in rules),
+ )
+ widths = [max(len(h), w) for h, w in zip(headers, widths)]
+ row = "{{0:<{0}}} {{1:<{1}}} {{2:<{2}}}".format(*widths)
+
+ click.echo(row.format(*headers).strip())
+ click.echo(row.format(*("-" * width for width in widths)))
+
+ for rule, methods in zip(rules, rule_methods):
+ click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip())
+
+
+cli = FlaskGroup(
+ help="""\
+A general utility script for Flask applications.
+
+Provides commands from Flask, extensions, and the application. Loads the
+application defined in the FLASK_APP environment variable, or from a wsgi.py
+file. Setting the FLASK_ENV environment variable to 'development' will enable
+debug mode.
+
+\b
+ {prefix}{cmd} FLASK_APP=hello.py
+ {prefix}{cmd} FLASK_ENV=development
+ {prefix}flask run
+""".format(
+ cmd="export" if os.name == "posix" else "set",
+ prefix="$ " if os.name == "posix" else "> ",
+ )
+)
+
+
+def main(as_module=False):
+ # TODO omit sys.argv once https://github.com/pallets/click/issues/536 is fixed
+ cli.main(args=sys.argv[1:], prog_name="python -m flask" if as_module else None)
+
+
+if __name__ == "__main__":
+ main(as_module=True)
diff --git a/matteo_env/Lib/site-packages/flask/config.py b/matteo_env/Lib/site-packages/flask/config.py
new file mode 100644
index 0000000..809de33
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/config.py
@@ -0,0 +1,269 @@
+# -*- coding: utf-8 -*-
+"""
+ flask.config
+ ~~~~~~~~~~~~
+
+ Implements the configuration related objects.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+import errno
+import os
+import types
+
+from werkzeug.utils import import_string
+
+from . import json
+from ._compat import iteritems
+from ._compat import string_types
+
+
+class ConfigAttribute(object):
+ """Makes an attribute forward to the config"""
+
+ def __init__(self, name, get_converter=None):
+ self.__name__ = name
+ self.get_converter = get_converter
+
+ def __get__(self, obj, type=None):
+ if obj is None:
+ return self
+ rv = obj.config[self.__name__]
+ if self.get_converter is not None:
+ rv = self.get_converter(rv)
+ return rv
+
+ def __set__(self, obj, value):
+ obj.config[self.__name__] = value
+
+
+class Config(dict):
+ """Works exactly like a dict but provides ways to fill it from files
+ or special dictionaries. There are two common patterns to populate the
+ config.
+
+ Either you can fill the config from a config file::
+
+ app.config.from_pyfile('yourconfig.cfg')
+
+ Or alternatively you can define the configuration options in the
+ module that calls :meth:`from_object` or provide an import path to
+ a module that should be loaded. It is also possible to tell it to
+ use the same module and with that provide the configuration values
+ just before the call::
+
+ DEBUG = True
+ SECRET_KEY = 'development key'
+ app.config.from_object(__name__)
+
+ In both cases (loading from any Python file or loading from modules),
+ only uppercase keys are added to the config. This makes it possible to use
+ lowercase values in the config file for temporary values that are not added
+ to the config or to define the config keys in the same file that implements
+ the application.
+
+ Probably the most interesting way to load configurations is from an
+ environment variable pointing to a file::
+
+ app.config.from_envvar('YOURAPPLICATION_SETTINGS')
+
+ In this case before launching the application you have to set this
+ environment variable to the file you want to use. On Linux and OS X
+ use the export statement::
+
+ export YOURAPPLICATION_SETTINGS='/path/to/config/file'
+
+ On windows use `set` instead.
+
+ :param root_path: path to which files are read relative from. When the
+ config object is created by the application, this is
+ the application's :attr:`~flask.Flask.root_path`.
+ :param defaults: an optional dictionary of default values
+ """
+
+ def __init__(self, root_path, defaults=None):
+ dict.__init__(self, defaults or {})
+ self.root_path = root_path
+
+ def from_envvar(self, variable_name, silent=False):
+ """Loads a configuration from an environment variable pointing to
+ a configuration file. This is basically just a shortcut with nicer
+ error messages for this line of code::
+
+ app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS'])
+
+ :param variable_name: name of the environment variable
+ :param silent: set to ``True`` if you want silent failure for missing
+ files.
+ :return: bool. ``True`` if able to load config, ``False`` otherwise.
+ """
+ rv = os.environ.get(variable_name)
+ if not rv:
+ if silent:
+ return False
+ raise RuntimeError(
+ "The environment variable %r is not set "
+ "and as such configuration could not be "
+ "loaded. Set this variable and make it "
+ "point to a configuration file" % variable_name
+ )
+ return self.from_pyfile(rv, silent=silent)
+
+ def from_pyfile(self, filename, silent=False):
+ """Updates the values in the config from a Python file. This function
+ behaves as if the file was imported as module with the
+ :meth:`from_object` function.
+
+ :param filename: the filename of the config. This can either be an
+ absolute filename or a filename relative to the
+ root path.
+ :param silent: set to ``True`` if you want silent failure for missing
+ files.
+
+ .. versionadded:: 0.7
+ `silent` parameter.
+ """
+ filename = os.path.join(self.root_path, filename)
+ d = types.ModuleType("config")
+ d.__file__ = filename
+ try:
+ with open(filename, mode="rb") as config_file:
+ exec(compile(config_file.read(), filename, "exec"), d.__dict__)
+ except IOError as e:
+ if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR):
+ return False
+ e.strerror = "Unable to load configuration file (%s)" % e.strerror
+ raise
+ self.from_object(d)
+ return True
+
+ def from_object(self, obj):
+ """Updates the values from the given object. An object can be of one
+ of the following two types:
+
+ - a string: in this case the object with that name will be imported
+ - an actual object reference: that object is used directly
+
+ Objects are usually either modules or classes. :meth:`from_object`
+ loads only the uppercase attributes of the module/class. A ``dict``
+ object will not work with :meth:`from_object` because the keys of a
+ ``dict`` are not attributes of the ``dict`` class.
+
+ Example of module-based configuration::
+
+ app.config.from_object('yourapplication.default_config')
+ from yourapplication import default_config
+ app.config.from_object(default_config)
+
+ Nothing is done to the object before loading. If the object is a
+ class and has ``@property`` attributes, it needs to be
+ instantiated before being passed to this method.
+
+ You should not use this function to load the actual configuration but
+ rather configuration defaults. The actual config should be loaded
+ with :meth:`from_pyfile` and ideally from a location not within the
+ package because the package might be installed system wide.
+
+ See :ref:`config-dev-prod` for an example of class-based configuration
+ using :meth:`from_object`.
+
+ :param obj: an import name or object
+ """
+ if isinstance(obj, string_types):
+ obj = import_string(obj)
+ for key in dir(obj):
+ if key.isupper():
+ self[key] = getattr(obj, key)
+
+ def from_json(self, filename, silent=False):
+ """Updates the values in the config from a JSON file. This function
+ behaves as if the JSON object was a dictionary and passed to the
+ :meth:`from_mapping` function.
+
+ :param filename: the filename of the JSON file. This can either be an
+ absolute filename or a filename relative to the
+ root path.
+ :param silent: set to ``True`` if you want silent failure for missing
+ files.
+
+ .. versionadded:: 0.11
+ """
+ filename = os.path.join(self.root_path, filename)
+
+ try:
+ with open(filename) as json_file:
+ obj = json.loads(json_file.read())
+ except IOError as e:
+ if silent and e.errno in (errno.ENOENT, errno.EISDIR):
+ return False
+ e.strerror = "Unable to load configuration file (%s)" % e.strerror
+ raise
+ return self.from_mapping(obj)
+
+ def from_mapping(self, *mapping, **kwargs):
+ """Updates the config like :meth:`update` ignoring items with non-upper
+ keys.
+
+ .. versionadded:: 0.11
+ """
+ mappings = []
+ if len(mapping) == 1:
+ if hasattr(mapping[0], "items"):
+ mappings.append(mapping[0].items())
+ else:
+ mappings.append(mapping[0])
+ elif len(mapping) > 1:
+ raise TypeError(
+ "expected at most 1 positional argument, got %d" % len(mapping)
+ )
+ mappings.append(kwargs.items())
+ for mapping in mappings:
+ for (key, value) in mapping:
+ if key.isupper():
+ self[key] = value
+ return True
+
+ def get_namespace(self, namespace, lowercase=True, trim_namespace=True):
+ """Returns a dictionary containing a subset of configuration options
+ that match the specified namespace/prefix. Example usage::
+
+ app.config['IMAGE_STORE_TYPE'] = 'fs'
+ app.config['IMAGE_STORE_PATH'] = '/var/app/images'
+ app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com'
+ image_store_config = app.config.get_namespace('IMAGE_STORE_')
+
+ The resulting dictionary `image_store_config` would look like::
+
+ {
+ 'type': 'fs',
+ 'path': '/var/app/images',
+ 'base_url': 'http://img.website.com'
+ }
+
+ This is often useful when configuration options map directly to
+ keyword arguments in functions or class constructors.
+
+ :param namespace: a configuration namespace
+ :param lowercase: a flag indicating if the keys of the resulting
+ dictionary should be lowercase
+ :param trim_namespace: a flag indicating if the keys of the resulting
+ dictionary should not include the namespace
+
+ .. versionadded:: 0.11
+ """
+ rv = {}
+ for k, v in iteritems(self):
+ if not k.startswith(namespace):
+ continue
+ if trim_namespace:
+ key = k[len(namespace) :]
+ else:
+ key = k
+ if lowercase:
+ key = key.lower()
+ rv[key] = v
+ return rv
+
+ def __repr__(self):
+ return "<%s %s>" % (self.__class__.__name__, dict.__repr__(self))
diff --git a/matteo_env/Lib/site-packages/flask/ctx.py b/matteo_env/Lib/site-packages/flask/ctx.py
new file mode 100644
index 0000000..172f6a0
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/ctx.py
@@ -0,0 +1,475 @@
+# -*- coding: utf-8 -*-
+"""
+ flask.ctx
+ ~~~~~~~~~
+
+ Implements the objects required to keep the context.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+import sys
+from functools import update_wrapper
+
+from werkzeug.exceptions import HTTPException
+
+from ._compat import BROKEN_PYPY_CTXMGR_EXIT
+from ._compat import reraise
+from .globals import _app_ctx_stack
+from .globals import _request_ctx_stack
+from .signals import appcontext_popped
+from .signals import appcontext_pushed
+
+
+# a singleton sentinel value for parameter defaults
+_sentinel = object()
+
+
+class _AppCtxGlobals(object):
+ """A plain object. Used as a namespace for storing data during an
+ application context.
+
+ Creating an app context automatically creates this object, which is
+ made available as the :data:`g` proxy.
+
+ .. describe:: 'key' in g
+
+ Check whether an attribute is present.
+
+ .. versionadded:: 0.10
+
+ .. describe:: iter(g)
+
+ Return an iterator over the attribute names.
+
+ .. versionadded:: 0.10
+ """
+
+ def get(self, name, default=None):
+ """Get an attribute by name, or a default value. Like
+ :meth:`dict.get`.
+
+ :param name: Name of attribute to get.
+ :param default: Value to return if the attribute is not present.
+
+ .. versionadded:: 0.10
+ """
+ return self.__dict__.get(name, default)
+
+ def pop(self, name, default=_sentinel):
+ """Get and remove an attribute by name. Like :meth:`dict.pop`.
+
+ :param name: Name of attribute to pop.
+ :param default: Value to return if the attribute is not present,
+ instead of raise a ``KeyError``.
+
+ .. versionadded:: 0.11
+ """
+ if default is _sentinel:
+ return self.__dict__.pop(name)
+ else:
+ return self.__dict__.pop(name, default)
+
+ def setdefault(self, name, default=None):
+ """Get the value of an attribute if it is present, otherwise
+ set and return a default value. Like :meth:`dict.setdefault`.
+
+ :param name: Name of attribute to get.
+ :param: default: Value to set and return if the attribute is not
+ present.
+
+ .. versionadded:: 0.11
+ """
+ return self.__dict__.setdefault(name, default)
+
+ def __contains__(self, item):
+ return item in self.__dict__
+
+ def __iter__(self):
+ return iter(self.__dict__)
+
+ def __repr__(self):
+ top = _app_ctx_stack.top
+ if top is not None:
+            return "<flask.g of %r>" % top.app.name
+ return object.__repr__(self)
+
+
+def after_this_request(f):
+ """Executes a function after this request. This is useful to modify
+ response objects. The function is passed the response object and has
+ to return the same or a new one.
+
+ Example::
+
+ @app.route('/')
+ def index():
+ @after_this_request
+ def add_header(response):
+ response.headers['X-Foo'] = 'Parachute'
+ return response
+ return 'Hello World!'
+
+ This is more useful if a function other than the view function wants to
+ modify a response. For instance think of a decorator that wants to add
+ some headers without converting the return value into a response object.
+
+ .. versionadded:: 0.9
+ """
+ _request_ctx_stack.top._after_request_functions.append(f)
+ return f
+
+
+def copy_current_request_context(f):
+ """A helper function that decorates a function to retain the current
+ request context. This is useful when working with greenlets. The moment
+ the function is decorated a copy of the request context is created and
+ then pushed when the function is called. The current session is also
+ included in the copied request context.
+
+ Example::
+
+ import gevent
+ from flask import copy_current_request_context
+
+ @app.route('/')
+ def index():
+ @copy_current_request_context
+ def do_some_work():
+ # do some work here, it can access flask.request or
+ # flask.session like you would otherwise in the view function.
+ ...
+ gevent.spawn(do_some_work)
+ return 'Regular response'
+
+ .. versionadded:: 0.10
+ """
+ top = _request_ctx_stack.top
+ if top is None:
+ raise RuntimeError(
+ "This decorator can only be used at local scopes "
+ "when a request context is on the stack. For instance within "
+ "view functions."
+ )
+ reqctx = top.copy()
+
+ def wrapper(*args, **kwargs):
+ with reqctx:
+ return f(*args, **kwargs)
+
+ return update_wrapper(wrapper, f)
+
+
+def has_request_context():
+ """If you have code that wants to test if a request context is there or
+ not this function can be used. For instance, you may want to take advantage
+ of request information if the request object is available, but fail
+ silently if it is unavailable.
+
+ ::
+
+ class User(db.Model):
+
+ def __init__(self, username, remote_addr=None):
+ self.username = username
+ if remote_addr is None and has_request_context():
+ remote_addr = request.remote_addr
+ self.remote_addr = remote_addr
+
+ Alternatively you can also just test any of the context bound objects
+ (such as :class:`request` or :class:`g`) for truthness::
+
+ class User(db.Model):
+
+ def __init__(self, username, remote_addr=None):
+ self.username = username
+ if remote_addr is None and request:
+ remote_addr = request.remote_addr
+ self.remote_addr = remote_addr
+
+ .. versionadded:: 0.7
+ """
+ return _request_ctx_stack.top is not None
+
+
+def has_app_context():
+ """Works like :func:`has_request_context` but for the application
+ context. You can also just do a boolean check on the
+ :data:`current_app` object instead.
+
+ .. versionadded:: 0.9
+ """
+ return _app_ctx_stack.top is not None
+
+
+class AppContext(object):
+ """The application context binds an application object implicitly
+ to the current thread or greenlet, similar to how the
+ :class:`RequestContext` binds request information. The application
+ context is also implicitly created if a request context is created
+ but the application is not on top of the individual application
+ context.
+ """
+
+ def __init__(self, app):
+ self.app = app
+ self.url_adapter = app.create_url_adapter(None)
+ self.g = app.app_ctx_globals_class()
+
+ # Like request context, app contexts can be pushed multiple times
+ # but there a basic "refcount" is enough to track them.
+ self._refcnt = 0
+
+ def push(self):
+ """Binds the app context to the current context."""
+ self._refcnt += 1
+ if hasattr(sys, "exc_clear"):
+ sys.exc_clear()
+ _app_ctx_stack.push(self)
+ appcontext_pushed.send(self.app)
+
+ def pop(self, exc=_sentinel):
+ """Pops the app context."""
+ try:
+ self._refcnt -= 1
+ if self._refcnt <= 0:
+ if exc is _sentinel:
+ exc = sys.exc_info()[1]
+ self.app.do_teardown_appcontext(exc)
+ finally:
+ rv = _app_ctx_stack.pop()
+ assert rv is self, "Popped wrong app context. (%r instead of %r)" % (rv, self)
+ appcontext_popped.send(self.app)
+
+ def __enter__(self):
+ self.push()
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ self.pop(exc_value)
+
+ if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
+ reraise(exc_type, exc_value, tb)
+
+
+class RequestContext(object):
+ """The request context contains all request relevant information. It is
+ created at the beginning of the request and pushed to the
+ `_request_ctx_stack` and removed at the end of it. It will create the
+ URL adapter and request object for the WSGI environment provided.
+
+ Do not attempt to use this class directly, instead use
+ :meth:`~flask.Flask.test_request_context` and
+ :meth:`~flask.Flask.request_context` to create this object.
+
+ When the request context is popped, it will evaluate all the
+ functions registered on the application for teardown execution
+ (:meth:`~flask.Flask.teardown_request`).
+
+ The request context is automatically popped at the end of the request
+ for you. In debug mode the request context is kept around if
+ exceptions happen so that interactive debuggers have a chance to
+ introspect the data. With 0.4 this can also be forced for requests
+ that did not fail and outside of ``DEBUG`` mode. By setting
+ ``'flask._preserve_context'`` to ``True`` on the WSGI environment the
+ context will not pop itself at the end of the request. This is used by
+ the :meth:`~flask.Flask.test_client` for example to implement the
+ deferred cleanup functionality.
+
+ You might find this helpful for unittests where you need the
+ information from the context local around for a little longer. Make
+ sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in
+ that situation, otherwise your unittests will leak memory.
+ """
+
+ def __init__(self, app, environ, request=None, session=None):
+ self.app = app
+ if request is None:
+ request = app.request_class(environ)
+ self.request = request
+ self.url_adapter = None
+ try:
+ self.url_adapter = app.create_url_adapter(self.request)
+ except HTTPException as e:
+ self.request.routing_exception = e
+ self.flashes = None
+ self.session = session
+
+ # Request contexts can be pushed multiple times and interleaved with
+ # other request contexts. Now only if the last level is popped we
+ # get rid of them. Additionally if an application context is missing
+ # one is created implicitly so for each level we add this information
+ self._implicit_app_ctx_stack = []
+
+ # indicator if the context was preserved. Next time another context
+ # is pushed the preserved context is popped.
+ self.preserved = False
+
+ # remembers the exception for pop if there is one in case the context
+ # preservation kicks in.
+ self._preserved_exc = None
+
+ # Functions that should be executed after the request on the response
+ # object. These will be called before the regular "after_request"
+ # functions.
+ self._after_request_functions = []
+
+ @property
+ def g(self):
+ return _app_ctx_stack.top.g
+
+ @g.setter
+ def g(self, value):
+ _app_ctx_stack.top.g = value
+
+ def copy(self):
+ """Creates a copy of this request context with the same request object.
+ This can be used to move a request context to a different greenlet.
+ Because the actual request object is the same this cannot be used to
+ move a request context to a different thread unless access to the
+ request object is locked.
+
+ .. versionadded:: 0.10
+
+ .. versionchanged:: 1.1
+ The current session object is used instead of reloading the original
+ data. This prevents `flask.session` pointing to an out-of-date object.
+ """
+ return self.__class__(
+ self.app,
+ environ=self.request.environ,
+ request=self.request,
+ session=self.session,
+ )
+
+ def match_request(self):
+ """Can be overridden by a subclass to hook into the matching
+ of the request.
+ """
+ try:
+ result = self.url_adapter.match(return_rule=True)
+ self.request.url_rule, self.request.view_args = result
+ except HTTPException as e:
+ self.request.routing_exception = e
+
+ def push(self):
+ """Binds the request context to the current context."""
+ # If an exception occurs in debug mode or if context preservation is
+ # activated under exception situations exactly one context stays
+ # on the stack. The rationale is that you want to access that
+ # information under debug situations. However if someone forgets to
+ # pop that context again we want to make sure that on the next push
+ # it's invalidated, otherwise we run at risk that something leaks
+ # memory. This is usually only a problem in test suite since this
+ # functionality is not active in production environments.
+ top = _request_ctx_stack.top
+ if top is not None and top.preserved:
+ top.pop(top._preserved_exc)
+
+ # Before we push the request context we have to ensure that there
+ # is an application context.
+ app_ctx = _app_ctx_stack.top
+ if app_ctx is None or app_ctx.app != self.app:
+ app_ctx = self.app.app_context()
+ app_ctx.push()
+ self._implicit_app_ctx_stack.append(app_ctx)
+ else:
+ self._implicit_app_ctx_stack.append(None)
+
+ if hasattr(sys, "exc_clear"):
+ sys.exc_clear()
+
+ _request_ctx_stack.push(self)
+
+ # Open the session at the moment that the request context is available.
+ # This allows a custom open_session method to use the request context.
+ # Only open a new session if this is the first time the request was
+ # pushed, otherwise stream_with_context loses the session.
+ if self.session is None:
+ session_interface = self.app.session_interface
+ self.session = session_interface.open_session(self.app, self.request)
+
+ if self.session is None:
+ self.session = session_interface.make_null_session(self.app)
+
+ if self.url_adapter is not None:
+ self.match_request()
+
+ def pop(self, exc=_sentinel):
+ """Pops the request context and unbinds it by doing that. This will
+ also trigger the execution of functions registered by the
+ :meth:`~flask.Flask.teardown_request` decorator.
+
+ .. versionchanged:: 0.9
+ Added the `exc` argument.
+ """
+ app_ctx = self._implicit_app_ctx_stack.pop()
+
+ try:
+ clear_request = False
+ if not self._implicit_app_ctx_stack:
+ self.preserved = False
+ self._preserved_exc = None
+ if exc is _sentinel:
+ exc = sys.exc_info()[1]
+ self.app.do_teardown_request(exc)
+
+ # If this interpreter supports clearing the exception information
+ # we do that now. This will only go into effect on Python 2.x,
+ # on 3.x it disappears automatically at the end of the exception
+ # stack.
+ if hasattr(sys, "exc_clear"):
+ sys.exc_clear()
+
+ request_close = getattr(self.request, "close", None)
+ if request_close is not None:
+ request_close()
+ clear_request = True
+ finally:
+ rv = _request_ctx_stack.pop()
+
+ # get rid of circular dependencies at the end of the request
+ # so that we don't require the GC to be active.
+ if clear_request:
+ rv.request.environ["werkzeug.request"] = None
+
+ # Get rid of the app as well if necessary.
+ if app_ctx is not None:
+ app_ctx.pop(exc)
+
+ assert rv is self, "Popped wrong request context. (%r instead of %r)" % (
+ rv,
+ self,
+ )
+
+ def auto_pop(self, exc):
+ if self.request.environ.get("flask._preserve_context") or (
+ exc is not None and self.app.preserve_context_on_exception
+ ):
+ self.preserved = True
+ self._preserved_exc = exc
+ else:
+ self.pop(exc)
+
+ def __enter__(self):
+ self.push()
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ # do not pop the request stack if we are in debug mode and an
+ # exception happened. This will allow the debugger to still
+ # access the request object in the interactive shell. Furthermore
+ # the context can be force kept alive for the test client.
+ # See flask.testing for how this works.
+ self.auto_pop(exc_value)
+
+ if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
+ reraise(exc_type, exc_value, tb)
+
+ def __repr__(self):
+ return "<%s '%s' [%s] of %s>" % (
+ self.__class__.__name__,
+ self.request.url,
+ self.request.method,
+ self.app.name,
+ )
diff --git a/matteo_env/Lib/site-packages/flask/debughelpers.py b/matteo_env/Lib/site-packages/flask/debughelpers.py
new file mode 100644
index 0000000..e475bd1
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/debughelpers.py
@@ -0,0 +1,183 @@
+# -*- coding: utf-8 -*-
+"""
+ flask.debughelpers
+ ~~~~~~~~~~~~~~~~~~
+
+ Various helpers to make the development experience better.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+import os
+from warnings import warn
+
+from ._compat import implements_to_string
+from ._compat import text_type
+from .app import Flask
+from .blueprints import Blueprint
+from .globals import _request_ctx_stack
+
+
+class UnexpectedUnicodeError(AssertionError, UnicodeError):
+ """Raised in places where we want some better error reporting for
+ unexpected unicode or binary data.
+ """
+
+
+@implements_to_string
+class DebugFilesKeyError(KeyError, AssertionError):
+ """Raised from request.files during debugging. The idea is that it can
+ provide a better error message than just a generic KeyError/BadRequest.
+ """
+
+ def __init__(self, request, key):
+ form_matches = request.form.getlist(key)
+ buf = [
+ 'You tried to access the file "%s" in the request.files '
+ "dictionary but it does not exist. The mimetype for the request "
+ 'is "%s" instead of "multipart/form-data" which means that no '
+ "file contents were transmitted. To fix this error you should "
+ 'provide enctype="multipart/form-data" in your form.'
+ % (key, request.mimetype)
+ ]
+ if form_matches:
+ buf.append(
+ "\n\nThe browser instead transmitted some file names. "
+ "This was submitted: %s" % ", ".join('"%s"' % x for x in form_matches)
+ )
+ self.msg = "".join(buf)
+
+ def __str__(self):
+ return self.msg
+
+
+class FormDataRoutingRedirect(AssertionError):
+ """This exception is raised by Flask in debug mode if it detects a
+ redirect caused by the routing system when the request method is not
+ GET, HEAD or OPTIONS. Reasoning: form data will be dropped.
+ """
+
+ def __init__(self, request):
+ exc = request.routing_exception
+ buf = [
+ "A request was sent to this URL (%s) but a redirect was "
+ 'issued automatically by the routing system to "%s".'
+ % (request.url, exc.new_url)
+ ]
+
+ # In case just a slash was appended we can be extra helpful
+ if request.base_url + "/" == exc.new_url.split("?")[0]:
+ buf.append(
+ " The URL was defined with a trailing slash so "
+ "Flask will automatically redirect to the URL "
+ "with the trailing slash if it was accessed "
+ "without one."
+ )
+
+ buf.append(
+ " Make sure to directly send your %s-request to this URL "
+ "since we can't make browsers or HTTP clients redirect "
+ "with form data reliably or without user interaction." % request.method
+ )
+ buf.append("\n\nNote: this exception is only raised in debug mode")
+ AssertionError.__init__(self, "".join(buf).encode("utf-8"))
+
+
+def attach_enctype_error_multidict(request):
+ """Since Flask 0.8 we're monkeypatching the files object in case a
+ request is detected that does not use multipart form data but the files
+ object is accessed.
+ """
+ oldcls = request.files.__class__
+
+ class newcls(oldcls):
+ def __getitem__(self, key):
+ try:
+ return oldcls.__getitem__(self, key)
+ except KeyError:
+ if key not in request.form:
+ raise
+ raise DebugFilesKeyError(request, key)
+
+ newcls.__name__ = oldcls.__name__
+ newcls.__module__ = oldcls.__module__
+ request.files.__class__ = newcls
+
+
+def _dump_loader_info(loader):
+ yield "class: %s.%s" % (type(loader).__module__, type(loader).__name__)
+ for key, value in sorted(loader.__dict__.items()):
+ if key.startswith("_"):
+ continue
+ if isinstance(value, (tuple, list)):
+ if not all(isinstance(x, (str, text_type)) for x in value):
+ continue
+ yield "%s:" % key
+ for item in value:
+ yield " - %s" % item
+ continue
+ elif not isinstance(value, (str, text_type, int, float, bool)):
+ continue
+ yield "%s: %r" % (key, value)
+
+
+def explain_template_loading_attempts(app, template, attempts):
+ """This should help developers understand what failed"""
+ info = ['Locating template "%s":' % template]
+ total_found = 0
+ blueprint = None
+ reqctx = _request_ctx_stack.top
+ if reqctx is not None and reqctx.request.blueprint is not None:
+ blueprint = reqctx.request.blueprint
+
+ for idx, (loader, srcobj, triple) in enumerate(attempts):
+ if isinstance(srcobj, Flask):
+ src_info = 'application "%s"' % srcobj.import_name
+ elif isinstance(srcobj, Blueprint):
+ src_info = 'blueprint "%s" (%s)' % (srcobj.name, srcobj.import_name)
+ else:
+ src_info = repr(srcobj)
+
+ info.append("% 5d: trying loader of %s" % (idx + 1, src_info))
+
+ for line in _dump_loader_info(loader):
+ info.append(" %s" % line)
+
+ if triple is None:
+ detail = "no match"
+ else:
+ detail = "found (%r)" % (triple[1] or "")
+ total_found += 1
+ info.append(" -> %s" % detail)
+
+ seems_fishy = False
+ if total_found == 0:
+ info.append("Error: the template could not be found.")
+ seems_fishy = True
+ elif total_found > 1:
+ info.append("Warning: multiple loaders returned a match for the template.")
+ seems_fishy = True
+
+ if blueprint is not None and seems_fishy:
+ info.append(
+ " The template was looked up from an endpoint that "
+ 'belongs to the blueprint "%s".' % blueprint
+ )
+ info.append(" Maybe you did not place a template in the right folder?")
+ info.append(" See http://flask.pocoo.org/docs/blueprints/#templates")
+
+ app.logger.info("\n".join(info))
+
+
+def explain_ignored_app_run():
+ if os.environ.get("WERKZEUG_RUN_MAIN") != "true":
+ warn(
+ Warning(
+ "Silently ignoring app.run() because the "
+ "application is run from the flask command line "
+ "executable. Consider putting app.run() behind an "
+ 'if __name__ == "__main__" guard to silence this '
+ "warning."
+ ),
+ stacklevel=3,
+ )
diff --git a/matteo_env/Lib/site-packages/flask/globals.py b/matteo_env/Lib/site-packages/flask/globals.py
new file mode 100644
index 0000000..6d32dcf
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/globals.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+"""
+ flask.globals
+ ~~~~~~~~~~~~~
+
+ Defines all the global objects that are proxies to the current
+ active context.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+from functools import partial
+
+from werkzeug.local import LocalProxy
+from werkzeug.local import LocalStack
+
+
+_request_ctx_err_msg = """\
+Working outside of request context.
+
+This typically means that you attempted to use functionality that needed
+an active HTTP request. Consult the documentation on testing for
+information about how to avoid this problem.\
+"""
+_app_ctx_err_msg = """\
+Working outside of application context.
+
+This typically means that you attempted to use functionality that needed
+to interface with the current application object in some way. To solve
+this, set up an application context with app.app_context(). See the
+documentation for more information.\
+"""
+
+
+def _lookup_req_object(name):
+ top = _request_ctx_stack.top
+ if top is None:
+ raise RuntimeError(_request_ctx_err_msg)
+ return getattr(top, name)
+
+
+def _lookup_app_object(name):
+ top = _app_ctx_stack.top
+ if top is None:
+ raise RuntimeError(_app_ctx_err_msg)
+ return getattr(top, name)
+
+
+def _find_app():
+ top = _app_ctx_stack.top
+ if top is None:
+ raise RuntimeError(_app_ctx_err_msg)
+ return top.app
+
+
+# context locals
+_request_ctx_stack = LocalStack()
+_app_ctx_stack = LocalStack()
+current_app = LocalProxy(_find_app)
+request = LocalProxy(partial(_lookup_req_object, "request"))
+session = LocalProxy(partial(_lookup_req_object, "session"))
+g = LocalProxy(partial(_lookup_app_object, "g"))
diff --git a/matteo_env/Lib/site-packages/flask/helpers.py b/matteo_env/Lib/site-packages/flask/helpers.py
new file mode 100644
index 0000000..df06db8
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/helpers.py
@@ -0,0 +1,1155 @@
+# -*- coding: utf-8 -*-
+"""
+ flask.helpers
+ ~~~~~~~~~~~~~
+
+ Implements various helpers.
+
+ :copyright: 2010 Pallets
+ :license: BSD-3-Clause
+"""
+import io
+import mimetypes
+import os
+import pkgutil
+import posixpath
+import socket
+import sys
+import unicodedata
+from functools import update_wrapper
+from threading import RLock
+from time import time
+from zlib import adler32
+
+from jinja2 import FileSystemLoader
+from werkzeug.datastructures import Headers
+from werkzeug.exceptions import BadRequest
+from werkzeug.exceptions import NotFound
+from werkzeug.exceptions import RequestedRangeNotSatisfiable
+from werkzeug.routing import BuildError
+from werkzeug.urls import url_quote
+from werkzeug.wsgi import wrap_file
+
+from ._compat import fspath
+from ._compat import PY2
+from ._compat import string_types
+from ._compat import text_type
+from .globals import _app_ctx_stack
+from .globals import _request_ctx_stack
+from .globals import current_app
+from .globals import request
+from .globals import session
+from .signals import message_flashed
+
+# sentinel
+_missing = object()
+
+
+# what separators does this operating system provide that are not a slash?
+# this is used by the send_from_directory function to ensure that nobody is
+# able to access files from outside the filesystem.
+_os_alt_seps = list(
+ sep for sep in [os.path.sep, os.path.altsep] if sep not in (None, "/")
+)
+
+
+def get_env():
+ """Get the environment the app is running in, indicated by the
+ :envvar:`FLASK_ENV` environment variable. The default is
+ ``'production'``.
+ """
+ return os.environ.get("FLASK_ENV") or "production"
+
+
+def get_debug_flag():
+ """Get whether debug mode should be enabled for the app, indicated
+ by the :envvar:`FLASK_DEBUG` environment variable. The default is
+ ``True`` if :func:`.get_env` returns ``'development'``, or ``False``
+ otherwise.
+ """
+ val = os.environ.get("FLASK_DEBUG")
+
+ if not val:
+ return get_env() == "development"
+
+ return val.lower() not in ("0", "false", "no")
+
+
+def get_load_dotenv(default=True):
+ """Get whether the user has disabled loading dotenv files by setting
+ :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load the
+ files.
+
+ :param default: What to return if the env var isn't set.
+ """
+ val = os.environ.get("FLASK_SKIP_DOTENV")
+
+ if not val:
+ return default
+
+ return val.lower() in ("0", "false", "no")
+
+
+def _endpoint_from_view_func(view_func):
+ """Internal helper that returns the default endpoint for a given
+ function. This always is the function name.
+ """
+ assert view_func is not None, "expected view func if endpoint is not provided."
+ return view_func.__name__
+
+
+def stream_with_context(generator_or_function):
+ """Request contexts disappear when the response is started on the server.
+ This is done for efficiency reasons and to make it less likely to encounter
+ memory leaks with badly written WSGI middlewares. The downside is that if
+ you are using streamed responses, the generator cannot access request bound
+ information any more.
+
+ This function however can help you keep the context around for longer::
+
+ from flask import stream_with_context, request, Response
+
+ @app.route('/stream')
+ def streamed_response():
+ @stream_with_context
+ def generate():
+ yield 'Hello '
+ yield request.args['name']
+ yield '!'
+ return Response(generate())
+
+ Alternatively it can also be used around a specific generator::
+
+ from flask import stream_with_context, request, Response
+
+ @app.route('/stream')
+ def streamed_response():
+ def generate():
+ yield 'Hello '
+ yield request.args['name']
+ yield '!'
+ return Response(stream_with_context(generate()))
+
+ .. versionadded:: 0.9
+ """
+ try:
+ gen = iter(generator_or_function)
+ except TypeError:
+
+ def decorator(*args, **kwargs):
+ gen = generator_or_function(*args, **kwargs)
+ return stream_with_context(gen)
+
+ return update_wrapper(decorator, generator_or_function)
+
+ def generator():
+ ctx = _request_ctx_stack.top
+ if ctx is None:
+ raise RuntimeError(
+ "Attempted to stream with context but "
+ "there was no context in the first place to keep around."
+ )
+ with ctx:
+ # Dummy sentinel. Has to be inside the context block or we're
+ # not actually keeping the context around.
+ yield None
+
+ # The try/finally is here so that if someone passes a WSGI level
+ # iterator in we're still running the cleanup logic. Generators
+ # don't need that because they are closed on their destruction
+ # automatically.
+ try:
+ for item in gen:
+ yield item
+ finally:
+ if hasattr(gen, "close"):
+ gen.close()
+
+ # The trick is to start the generator. Then the code execution runs until
+ # the first dummy None is yielded at which point the context was already
+ # pushed. This item is discarded. Then when the iteration continues the
+ # real generator is executed.
+ wrapped_g = generator()
+ next(wrapped_g)
+ return wrapped_g
+
+
+def make_response(*args):
+ """Sometimes it is necessary to set additional headers in a view. Because
+ views do not have to return response objects but can return a value that
+ is converted into a response object by Flask itself, it becomes tricky to
+ add headers to it. This function can be called instead of using a return
+ and you will get a response object which you can use to attach headers.
+
+ If view looked like this and you want to add a new header::
+
+ def index():
+ return render_template('index.html', foo=42)
+
+ You can now do something like this::
+
+ def index():
+ response = make_response(render_template('index.html', foo=42))
+ response.headers['X-Parachutes'] = 'parachutes are cool'
+ return response
+
+ This function accepts the very same arguments you can return from a
+ view function. This for example creates a response with a 404 error
+ code::
+
+ response = make_response(render_template('not_found.html'), 404)
+
+ The other use case of this function is to force the return value of a
+ view function into a response which is helpful with view
+ decorators::
+
+ response = make_response(view_function())
+ response.headers['X-Parachutes'] = 'parachutes are cool'
+
+ Internally this function does the following things:
+
+ - if no arguments are passed, it creates a new response argument
+ - if one argument is passed, :meth:`flask.Flask.make_response`
+ is invoked with it.
+ - if more than one argument is passed, the arguments are passed
+ to the :meth:`flask.Flask.make_response` function as tuple.
+
+ .. versionadded:: 0.6
+ """
+ if not args:
+ return current_app.response_class()
+ if len(args) == 1:
+ args = args[0]
+ return current_app.make_response(args)
+
+
+def url_for(endpoint, **values):
+ """Generates a URL to the given endpoint with the method provided.
+
+ Variable arguments that are unknown to the target endpoint are appended
+ to the generated URL as query arguments. If the value of a query argument
+ is ``None``, the whole pair is skipped. In case blueprints are active
+ you can shortcut references to the same blueprint by prefixing the
+ local endpoint with a dot (``.``).
+
+ This will reference the index function local to the current blueprint::
+
+ url_for('.index')
+
+ For more information, head over to the :ref:`Quickstart `.
+
+ Configuration values ``APPLICATION_ROOT`` and ``SERVER_NAME`` are only used when
+ generating URLs outside of a request context.
+
+ To integrate applications, :class:`Flask` has a hook to intercept URL build
+ errors through :attr:`Flask.url_build_error_handlers`. The `url_for`
+ function results in a :exc:`~werkzeug.routing.BuildError` when the current
+ app does not have a URL for the given endpoint and values. When it does, the
+ :data:`~flask.current_app` calls its :attr:`~Flask.url_build_error_handlers` if
+ it is not ``None``, which can return a string to use as the result of
+ `url_for` (instead of `url_for`'s default to raise the
+ :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception.
+ An example::
+
+ def external_url_handler(error, endpoint, values):
+ "Looks up an external URL when `url_for` cannot build a URL."
+ # This is an example of hooking the build_error_handler.
+ # Here, lookup_url is some utility function you've built
+ # which looks up the endpoint in some external URL registry.
+ url = lookup_url(endpoint, **values)
+ if url is None:
+ # External lookup did not have a URL.
+ # Re-raise the BuildError, in context of original traceback.
+ exc_type, exc_value, tb = sys.exc_info()
+ if exc_value is error:
+ raise exc_type, exc_value, tb
+ else:
+ raise error
+ # url_for will use this result, instead of raising BuildError.
+ return url
+
+ app.url_build_error_handlers.append(external_url_handler)
+
+ Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and
+ `endpoint` and `values` are the arguments passed into `url_for`. Note
+ that this is for building URLs outside the current application, and not for
+ handling 404 NotFound errors.
+
+ .. versionadded:: 0.10
+ The `_scheme` parameter was added.
+
+ .. versionadded:: 0.9
+ The `_anchor` and `_method` parameters were added.
+
+ .. versionadded:: 0.9
+ Calls :meth:`Flask.handle_build_error` on
+ :exc:`~werkzeug.routing.BuildError`.
+
+ :param endpoint: the endpoint of the URL (name of the function)
+ :param values: the variable arguments of the URL rule
+ :param _external: if set to ``True``, an absolute URL is generated. Server
+ address can be changed via ``SERVER_NAME`` configuration variable which
+ falls back to the `Host` header, then to the IP and port of the request.
+ :param _scheme: a string specifying the desired URL scheme. The `_external`
+ parameter must be set to ``True`` or a :exc:`ValueError` is raised. The default
+ behavior uses the same scheme as the current request, or
+ ``PREFERRED_URL_SCHEME`` from the :ref:`app configuration ` if no
+ request context is available. As of Werkzeug 0.10, this also can be set
+ to an empty string to build protocol-relative URLs.
+ :param _anchor: if provided this is added as anchor to the URL.
+ :param _method: if provided this explicitly specifies an HTTP method.
+ """
+ appctx = _app_ctx_stack.top
+ reqctx = _request_ctx_stack.top
+
+ if appctx is None:
+ raise RuntimeError(
+ "Attempted to generate a URL without the application context being"
+ " pushed. This has to be executed when application context is"
+ " available."
+ )
+
+ # If request specific information is available we have some extra
+ # features that support "relative" URLs.
+ if reqctx is not None:
+ url_adapter = reqctx.url_adapter
+ blueprint_name = request.blueprint
+
+ if endpoint[:1] == ".":
+ if blueprint_name is not None:
+ endpoint = blueprint_name + endpoint
+ else:
+ endpoint = endpoint[1:]
+
+ external = values.pop("_external", False)
+
+ # Otherwise go with the url adapter from the appctx and make
+ # the URLs external by default.
+ else:
+ url_adapter = appctx.url_adapter
+
+ if url_adapter is None:
+ raise RuntimeError(
+ "Application was not able to create a URL adapter for request"
+ " independent URL generation. You might be able to fix this by"
+ " setting the SERVER_NAME config variable."
+ )
+
+ external = values.pop("_external", True)
+
+ anchor = values.pop("_anchor", None)
+ method = values.pop("_method", None)
+ scheme = values.pop("_scheme", None)
+ appctx.app.inject_url_defaults(endpoint, values)
+
+ # This is not the best way to deal with this but currently the
+ # underlying Werkzeug router does not support overriding the scheme on
+ # a per build call basis.
+ old_scheme = None
+ if scheme is not None:
+ if not external:
+ raise ValueError("When specifying _scheme, _external must be True")
+ old_scheme = url_adapter.url_scheme
+ url_adapter.url_scheme = scheme
+
+ try:
+ try:
+ rv = url_adapter.build(
+ endpoint, values, method=method, force_external=external
+ )
+ finally:
+ if old_scheme is not None:
+ url_adapter.url_scheme = old_scheme
+ except BuildError as error:
+ # We need to inject the values again so that the app callback can
+ # deal with that sort of stuff.
+ values["_external"] = external
+ values["_anchor"] = anchor
+ values["_method"] = method
+ values["_scheme"] = scheme
+ return appctx.app.handle_url_build_error(error, endpoint, values)
+
+ if anchor is not None:
+ rv += "#" + url_quote(anchor)
+ return rv
+
+
+def get_template_attribute(template_name, attribute):
+ """Loads a macro (or variable) a template exports. This can be used to
+ invoke a macro from within Python code. If you for example have a
+ template named :file:`_cider.html` with the following contents:
+
+ .. sourcecode:: html+jinja
+
+ {% macro hello(name) %}Hello {{ name }}!{% endmacro %}
+
+ You can access this from Python code like this::
+
+ hello = get_template_attribute('_cider.html', 'hello')
+ return hello('World')
+
+ .. versionadded:: 0.2
+
+ :param template_name: the name of the template
+ :param attribute: the name of the variable of macro to access
+ """
+ return getattr(current_app.jinja_env.get_template(template_name).module, attribute)
+
+
+def flash(message, category="message"):
+ """Flashes a message to the next request. In order to remove the
+ flashed message from the session and to display it to the user,
+ the template has to call :func:`get_flashed_messages`.
+
+ .. versionchanged:: 0.3
+ `category` parameter added.
+
+ :param message: the message to be flashed.
+ :param category: the category for the message. The following values
+ are recommended: ``'message'`` for any kind of message,
+ ``'error'`` for errors, ``'info'`` for information
+ messages and ``'warning'`` for warnings. However any
+ kind of string can be used as category.
+ """
+ # Original implementation:
+ #
+ # session.setdefault('_flashes', []).append((category, message))
+ #
+ # This assumed that changes made to mutable structures in the session are
+ # always in sync with the session object, which is not true for session
+ # implementations that use external storage for keeping their keys/values.
+ flashes = session.get("_flashes", [])
+ flashes.append((category, message))
+ session["_flashes"] = flashes
+ message_flashed.send(
+ current_app._get_current_object(), message=message, category=category
+ )
+
+
+def get_flashed_messages(with_categories=False, category_filter=()):
+ """Pulls all flashed messages from the session and returns them.
+ Further calls in the same request to the function will return
+ the same messages. By default just the messages are returned,
+ but when `with_categories` is set to ``True``, the return value will
+ be a list of tuples in the form ``(category, message)`` instead.
+
+ Filter the flashed messages to one or more categories by providing those
+ categories in `category_filter`. This allows rendering categories in
+ separate html blocks. The `with_categories` and `category_filter`
+ arguments are distinct:
+
+ * `with_categories` controls whether categories are returned with message
+ text (``True`` gives a tuple, where ``False`` gives just the message text).
+ * `category_filter` filters the messages down to only those matching the
+ provided categories.
+
+ See :ref:`message-flashing-pattern` for examples.
+
+ .. versionchanged:: 0.3
+ `with_categories` parameter added.
+
+ .. versionchanged:: 0.9
+ `category_filter` parameter added.
+
+ :param with_categories: set to ``True`` to also receive categories.
+ :param category_filter: whitelist of categories to limit return values
+ """
+ flashes = _request_ctx_stack.top.flashes
+ if flashes is None:
+ _request_ctx_stack.top.flashes = flashes = (
+ session.pop("_flashes") if "_flashes" in session else []
+ )
+ if category_filter:
+ flashes = list(filter(lambda f: f[0] in category_filter, flashes))
+ if not with_categories:
+ return [x[1] for x in flashes]
+ return flashes
+
+
+def send_file(
+ filename_or_fp,
+ mimetype=None,
+ as_attachment=False,
+ attachment_filename=None,
+ add_etags=True,
+ cache_timeout=None,
+ conditional=False,
+ last_modified=None,
+):
+ """Sends the contents of a file to the client. This will use the
+ most efficient method available and configured. By default it will
+ try to use the WSGI server's file_wrapper support. Alternatively
+ you can set the application's :attr:`~Flask.use_x_sendfile` attribute
+ to ``True`` to directly emit an ``X-Sendfile`` header. This however
+ requires support of the underlying webserver for ``X-Sendfile``.
+
+ By default it will try to guess the mimetype for you, but you can
+ also explicitly provide one. For extra security you probably want
+ to send certain files as attachment (HTML for instance). The mimetype
+ guessing requires a `filename` or an `attachment_filename` to be
+ provided.
+
+ ETags will also be attached automatically if a `filename` is provided. You
+ can turn this off by setting `add_etags=False`.
+
+ If `conditional=True` and `filename` is provided, this method will try to
+ upgrade the response stream to support range requests. This will allow
+ the request to be answered with partial content response.
+
+ Please never pass filenames to this function from user sources;
+ you should use :func:`send_from_directory` instead.
+
+ .. versionadded:: 0.2
+
+ .. versionadded:: 0.5
+ The `add_etags`, `cache_timeout` and `conditional` parameters were
+ added. The default behavior is now to attach etags.
+
+ .. versionchanged:: 0.7
+ mimetype guessing and etag support for file objects was
+ deprecated because it was unreliable. Pass a filename if you are
+ able to, otherwise attach an etag yourself. This functionality
+ will be removed in Flask 1.0
+
+ .. versionchanged:: 0.9
+ cache_timeout pulls its default from application config, when None.
+
+ .. versionchanged:: 0.12
+ The filename is no longer automatically inferred from file objects. If
+ you want to use automatic mimetype and etag support, pass a filepath via
+ `filename_or_fp` or `attachment_filename`.
+
+ .. versionchanged:: 0.12
+ The `attachment_filename` is preferred over `filename` for MIME-type
+ detection.
+
+ .. versionchanged:: 1.0
+ UTF-8 filenames, as specified in `RFC 2231`_, are supported.
+
+ .. _RFC 2231: https://tools.ietf.org/html/rfc2231#section-4
+
+ .. versionchanged:: 1.0.3
+ Filenames are encoded with ASCII instead of Latin-1 for broader
+ compatibility with WSGI servers.
+
+ .. versionchanged:: 1.1
+ Filename may be a :class:`~os.PathLike` object.
+
+ .. versionadded:: 1.1
+ Partial content supports :class:`~io.BytesIO`.
+
+ :param filename_or_fp: the filename of the file to send.
+ This is relative to the :attr:`~Flask.root_path`
+ if a relative path is specified.
+ Alternatively a file object might be provided in
+ which case ``X-Sendfile`` might not work and fall
+ back to the traditional method. Make sure that the
+ file pointer is positioned at the start of data to
+ send before calling :func:`send_file`.
+ :param mimetype: the mimetype of the file if provided. If a file path is
+ given, auto detection happens as fallback, otherwise an
+ error will be raised.
+ :param as_attachment: set to ``True`` if you want to send this file with
+ a ``Content-Disposition: attachment`` header.
+ :param attachment_filename: the filename for the attachment if it
+ differs from the file's filename.
+ :param add_etags: set to ``False`` to disable attaching of etags.
+ :param conditional: set to ``True`` to enable conditional responses.
+
+ :param cache_timeout: the timeout in seconds for the headers. When ``None``
+ (default), this value is set by
+ :meth:`~Flask.get_send_file_max_age` of
+ :data:`~flask.current_app`.
+ :param last_modified: set the ``Last-Modified`` header to this value,
+ a :class:`~datetime.datetime` or timestamp.
+ If a file was passed, this overrides its mtime.
+ """
+ mtime = None
+ fsize = None
+
+ if hasattr(filename_or_fp, "__fspath__"):
+ filename_or_fp = fspath(filename_or_fp)
+
+ if isinstance(filename_or_fp, string_types):
+ filename = filename_or_fp
+ if not os.path.isabs(filename):
+ filename = os.path.join(current_app.root_path, filename)
+ file = None
+ if attachment_filename is None:
+ attachment_filename = os.path.basename(filename)
+ else:
+ file = filename_or_fp
+ filename = None
+
+ if mimetype is None:
+ if attachment_filename is not None:
+ mimetype = (
+ mimetypes.guess_type(attachment_filename)[0]
+ or "application/octet-stream"
+ )
+
+ if mimetype is None:
+ raise ValueError(
+ "Unable to infer MIME-type because no filename is available. "
+ "Please set either `attachment_filename`, pass a filepath to "
+ "`filename_or_fp` or set your own MIME-type via `mimetype`."
+ )
+
+ headers = Headers()
+ if as_attachment:
+ if attachment_filename is None:
+ raise TypeError("filename unavailable, required for sending as attachment")
+
+ if not isinstance(attachment_filename, text_type):
+ attachment_filename = attachment_filename.decode("utf-8")
+
+ try:
+ attachment_filename = attachment_filename.encode("ascii")
+ except UnicodeEncodeError:
+ filenames = {
+ "filename": unicodedata.normalize("NFKD", attachment_filename).encode(
+ "ascii", "ignore"
+ ),
+ "filename*": "UTF-8''%s" % url_quote(attachment_filename, safe=b""),
+ }
+ else:
+ filenames = {"filename": attachment_filename}
+
+ headers.add("Content-Disposition", "attachment", **filenames)
+
+ if current_app.use_x_sendfile and filename:
+ if file is not None:
+ file.close()
+ headers["X-Sendfile"] = filename
+ fsize = os.path.getsize(filename)
+ headers["Content-Length"] = fsize
+ data = None
+ else:
+ if file is None:
+ file = open(filename, "rb")
+ mtime = os.path.getmtime(filename)
+ fsize = os.path.getsize(filename)
+ headers["Content-Length"] = fsize
+ elif isinstance(file, io.BytesIO):
+ try:
+ fsize = file.getbuffer().nbytes
+ except AttributeError:
+ # Python 2 doesn't have getbuffer
+ fsize = len(file.getvalue())
+ headers["Content-Length"] = fsize
+ data = wrap_file(request.environ, file)
+
+ rv = current_app.response_class(
+ data, mimetype=mimetype, headers=headers, direct_passthrough=True
+ )
+
+ if last_modified is not None:
+ rv.last_modified = last_modified
+ elif mtime is not None:
+ rv.last_modified = mtime
+
+ rv.cache_control.public = True
+ if cache_timeout is None:
+ cache_timeout = current_app.get_send_file_max_age(filename)
+ if cache_timeout is not None:
+ rv.cache_control.max_age = cache_timeout
+ rv.expires = int(time() + cache_timeout)
+
+ if add_etags and filename is not None:
+ from warnings import warn
+
+ try:
+ rv.set_etag(
+ "%s-%s-%s"
+ % (
+ os.path.getmtime(filename),
+ os.path.getsize(filename),
+ adler32(
+ filename.encode("utf-8")
+ if isinstance(filename, text_type)
+ else filename
+ )
+ & 0xFFFFFFFF,
+ )
+ )
+ except OSError:
+ warn(
+ "Access %s failed, maybe it does not exist, so ignore etags in "
+ "headers" % filename,
+ stacklevel=2,
+ )
+
+ if conditional:
+ try:
+ rv = rv.make_conditional(request, accept_ranges=True, complete_length=fsize)
+ except RequestedRangeNotSatisfiable:
+ if file is not None:
+ file.close()
+ raise
+ # make sure we don't send x-sendfile for servers that
+ # ignore the 304 status code for x-sendfile.
+ if rv.status_code == 304:
+ rv.headers.pop("x-sendfile", None)
+ return rv
+
+
+def safe_join(directory, *pathnames):
+ """Safely join `directory` and zero or more untrusted `pathnames`
+ components.
+
+ Example usage::
+
+ @app.route('/wiki/')
+ def wiki_page(filename):
+ filename = safe_join(app.config['WIKI_FOLDER'], filename)
+ with open(filename, 'rb') as fd:
+ content = fd.read() # Read and process the file content...
+
+ :param directory: the trusted base directory.
+ :param pathnames: the untrusted pathnames relative to that directory.
+ :raises: :class:`~werkzeug.exceptions.NotFound` if one or more passed
+ paths fall out of its boundaries.
+ """
+
+ parts = [directory]
+
+ for filename in pathnames:
+ if filename != "":
+ filename = posixpath.normpath(filename)
+
+ if (
+ any(sep in filename for sep in _os_alt_seps)
+ or os.path.isabs(filename)
+ or filename == ".."
+ or filename.startswith("../")
+ ):
+ raise NotFound()
+
+ parts.append(filename)
+
+ return posixpath.join(*parts)
+
+
+def send_from_directory(directory, filename, **options):
+ """Send a file from a given directory with :func:`send_file`. This
+ is a secure way to quickly expose static files from an upload folder
+ or something similar.
+
+ Example usage::
+
+ @app.route('/uploads/')
+ def download_file(filename):
+ return send_from_directory(app.config['UPLOAD_FOLDER'],
+ filename, as_attachment=True)
+
+ .. admonition:: Sending files and Performance
+
+ It is strongly recommended to activate either ``X-Sendfile`` support in
+ your webserver or (if no authentication happens) to tell the webserver
+ to serve files for the given path on its own without calling into the
+ web application for improved performance.
+
+ .. versionadded:: 0.5
+
+ :param directory: the directory where all the files are stored.
+ :param filename: the filename relative to that directory to
+ download.
+ :param options: optional keyword arguments that are directly
+ forwarded to :func:`send_file`.
+ """
+ filename = fspath(filename)
+ directory = fspath(directory)
+ filename = safe_join(directory, filename)
+ if not os.path.isabs(filename):
+ filename = os.path.join(current_app.root_path, filename)
+ try:
+ if not os.path.isfile(filename):
+ raise NotFound()
+ except (TypeError, ValueError):
+ raise BadRequest()
+ options.setdefault("conditional", True)
+ return send_file(filename, **options)
+
+
+def get_root_path(import_name):
+ """Returns the path to a package or cwd if that cannot be found. This
+ returns the path of a package or the folder that contains a module.
+
+ Not to be confused with the package path returned by :func:`find_package`.
+ """
+ # Module already imported and has a file attribute. Use that first.
+ mod = sys.modules.get(import_name)
+ if mod is not None and hasattr(mod, "__file__"):
+ return os.path.dirname(os.path.abspath(mod.__file__))
+
+ # Next attempt: check the loader.
+ loader = pkgutil.get_loader(import_name)
+
+ # Loader does not exist or we're referring to an unloaded main module
+ # or a main module without path (interactive sessions), go with the
+ # current working directory.
+ if loader is None or import_name == "__main__":
+ return os.getcwd()
+
+ # For .egg, zipimporter does not have get_filename until Python 2.7.
+ # Some other loaders might exhibit the same behavior.
+ if hasattr(loader, "get_filename"):
+ filepath = loader.get_filename(import_name)
+ else:
+ # Fall back to imports.
+ __import__(import_name)
+ mod = sys.modules[import_name]
+ filepath = getattr(mod, "__file__", None)
+
+ # If we don't have a filepath it might be because we are a
+ # namespace package. In this case we pick the root path from the
+ # first module that is contained in our package.
+ if filepath is None:
+ raise RuntimeError(
+ "No root path can be found for the provided "
+ 'module "%s". This can happen because the '
+ "module came from an import hook that does "
+ "not provide file name information or because "
+ "it's a namespace package. In this case "
+ "the root path needs to be explicitly "
+ "provided." % import_name
+ )
+
+ # filepath is import_name.py for a module, or __init__.py for a package.
+ return os.path.dirname(os.path.abspath(filepath))
+
+
+def _matching_loader_thinks_module_is_package(loader, mod_name):
+ """Given the loader that loaded a module and the module this function
+ attempts to figure out if the given module is actually a package.
+ """
+ # If the loader can tell us if something is a package, we can
+ # directly ask the loader.
+ if hasattr(loader, "is_package"):
+ return loader.is_package(mod_name)
+ # importlib's namespace loaders do not have this functionality but
+ # all the modules it loads are packages, so we can take advantage of
+ # this information.
+ elif (
+ loader.__class__.__module__ == "_frozen_importlib"
+ and loader.__class__.__name__ == "NamespaceLoader"
+ ):
+ return True
+ # Otherwise we need to fail with an error that explains what went
+ # wrong.
+ raise AttributeError(
+ (
+ "%s.is_package() method is missing but is required by Flask of "
+ "PEP 302 import hooks. If you do not use import hooks and "
+ "you encounter this error please file a bug against Flask."
+ )
+ % loader.__class__.__name__
+ )
+
+
+def _find_package_path(root_mod_name):
+ """Find the path where the module's root exists in"""
+ if sys.version_info >= (3, 4):
+ import importlib.util
+
+ try:
+ spec = importlib.util.find_spec(root_mod_name)
+ if spec is None:
+ raise ValueError("not found")
+ # ImportError: the machinery told us it does not exist
+ # ValueError:
+ # - the module name was invalid
+ # - the module name is __main__
+ # - *we* raised `ValueError` due to `spec` being `None`
+ except (ImportError, ValueError):
+ pass # handled below
+ else:
+ # namespace package
+ if spec.origin in {"namespace", None}:
+ return os.path.dirname(next(iter(spec.submodule_search_locations)))
+ # a package (with __init__.py)
+ elif spec.submodule_search_locations:
+ return os.path.dirname(os.path.dirname(spec.origin))
+ # just a normal module
+ else:
+ return os.path.dirname(spec.origin)
+
+ # we were unable to find the `package_path` using PEP 451 loaders
+ loader = pkgutil.get_loader(root_mod_name)
+ if loader is None or root_mod_name == "__main__":
+ # import name is not found, or interactive/main module
+ return os.getcwd()
+ else:
+ # For .egg, zipimporter does not have get_filename until Python 2.7.
+ if hasattr(loader, "get_filename"):
+ filename = loader.get_filename(root_mod_name)
+ elif hasattr(loader, "archive"):
+ # zipimporter's loader.archive points to the .egg or .zip
+ # archive filename is dropped in call to dirname below.
+ filename = loader.archive
+ else:
+ # At least one loader is missing both get_filename and archive:
+ # Google App Engine's HardenedModulesHook
+ #
+ # Fall back to imports.
+ __import__(root_mod_name)
+ filename = sys.modules[root_mod_name].__file__
+ package_path = os.path.abspath(os.path.dirname(filename))
+
+ # In case the root module is a package we need to chop of the
+ # rightmost part. This needs to go through a helper function
+ # because of python 3.3 namespace packages.
+ if _matching_loader_thinks_module_is_package(loader, root_mod_name):
+ package_path = os.path.dirname(package_path)
+
+ return package_path
+
+
+def find_package(import_name):
+ """Finds a package and returns the prefix (or None if the package is
+ not installed) as well as the folder that contains the package or
+ module as a tuple. The package path returned is the module that would
+ have to be added to the pythonpath in order to make it possible to
+ import the module. The prefix is the path below which a UNIX like
+ folder structure exists (lib, share etc.).
+ """
+ root_mod_name, _, _ = import_name.partition(".")
+ package_path = _find_package_path(root_mod_name)
+ site_parent, site_folder = os.path.split(package_path)
+ py_prefix = os.path.abspath(sys.prefix)
+ if package_path.startswith(py_prefix):
+ return py_prefix, package_path
+ elif site_folder.lower() == "site-packages":
+ parent, folder = os.path.split(site_parent)
+ # Windows like installations
+ if folder.lower() == "lib":
+ base_dir = parent
+ # UNIX like installations
+ elif os.path.basename(parent).lower() == "lib":
+ base_dir = os.path.dirname(parent)
+ else:
+ base_dir = site_parent
+ return base_dir, package_path
+ return None, package_path
+
+
class locked_cached_property(object):
    """A decorator turning a method into a lazily computed property.

    The wrapped function runs once per instance; its result is stored in
    the instance ``__dict__`` and returned on every later access.  Works
    like Werkzeug's ``cached_property`` but guards the computation with
    an :class:`~threading.RLock` for thread safety.
    """

    def __init__(self, func, name=None, doc=None):
        self.__name__ = name or func.__name__
        self.__module__ = func.__module__
        self.__doc__ = doc or func.__doc__
        self.func = func
        self.lock = RLock()

    def __get__(self, obj, type=None):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self
        with self.lock:
            cached = obj.__dict__.get(self.__name__, _missing)
            if cached is not _missing:
                return cached
            # First access: compute and memoize on the instance.
            computed = self.func(obj)
            obj.__dict__[self.__name__] = computed
            return computed
+
+
class _PackageBoundObject(object):
    """Common base for objects bound to a Python package or module
    (application and blueprint objects): resolves the package's
    filesystem root path and provides static-file, template, and
    resource helpers relative to it.
    """

    #: The name of the package or module that this app belongs to. Do not
    #: change this once it is set by the constructor.
    import_name = None

    #: Location of the template files to be added to the template lookup.
    #: ``None`` if templates should not be added.
    template_folder = None

    #: Absolute path to the package on the filesystem. Used to look up
    #: resources contained in the package.
    root_path = None

    def __init__(self, import_name, template_folder=None, root_path=None):
        self.import_name = import_name
        self.template_folder = template_folder

        # Derive the root path from the import name unless the caller
        # pinned one explicitly.
        if root_path is None:
            root_path = get_root_path(self.import_name)

        self.root_path = root_path
        self._static_folder = None
        self._static_url_path = None

        # circular import
        from .cli import AppGroup

        #: The Click command group for registration of CLI commands
        #: on the application and associated blueprints. These commands
        #: are accessible via the :command:`flask` command once the
        #: application has been discovered and blueprints registered.
        self.cli = AppGroup()

    @property
    def static_folder(self):
        """The absolute path to the configured static folder, or ``None``
        when no static folder is configured (implicit return).
        """
        if self._static_folder is not None:
            return os.path.join(self.root_path, self._static_folder)

    @static_folder.setter
    def static_folder(self, value):
        # Strip trailing separators so later os.path.join calls stay clean.
        if value is not None:
            value = value.rstrip("/\\")
        self._static_folder = value

    @property
    def static_url_path(self):
        """The URL prefix that the static route will be accessible from.

        If it was not configured during init, it is derived from
        :attr:`static_folder`.
        """
        if self._static_url_path is not None:
            return self._static_url_path

        # Derive "/<basename>" from the folder; rstrip guards against an
        # empty basename producing a bare "/".
        if self.static_folder is not None:
            basename = os.path.basename(self.static_folder)
            return ("/" + basename).rstrip("/")

    @static_url_path.setter
    def static_url_path(self, value):
        if value is not None:
            value = value.rstrip("/")

        self._static_url_path = value

    @property
    def has_static_folder(self):
        """This is ``True`` if the package bound object's container has a
        folder for static files.

        .. versionadded:: 0.5
        """
        return self.static_folder is not None

    @locked_cached_property
    def jinja_loader(self):
        """The Jinja loader for this package bound object, or ``None``
        when no template folder is configured.

        .. versionadded:: 0.5
        """
        if self.template_folder is not None:
            return FileSystemLoader(os.path.join(self.root_path, self.template_folder))

    def get_send_file_max_age(self, filename):
        """Provides default cache_timeout for the :func:`send_file` functions.

        By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from
        the configuration of :data:`~flask.current_app`.

        Static file functions such as :func:`send_from_directory` use this
        function, and :func:`send_file` calls this function on
        :data:`~flask.current_app` when the given cache_timeout is ``None``. If a
        cache_timeout is given in :func:`send_file`, that timeout is used;
        otherwise, this method is called.

        This allows subclasses to change the behavior when sending files based
        on the filename. For example, to set the cache timeout for .js files
        to 60 seconds::

            class MyFlask(flask.Flask):
                def get_send_file_max_age(self, name):
                    if name.lower().endswith('.js'):
                        return 60
                    return flask.Flask.get_send_file_max_age(self, name)

        .. versionadded:: 0.9
        """
        return total_seconds(current_app.send_file_max_age_default)

    def send_static_file(self, filename):
        """Function used internally to send static files from the static
        folder to the browser.

        :raises RuntimeError: if no static folder is configured.

        .. versionadded:: 0.5
        """
        if not self.has_static_folder:
            raise RuntimeError("No static folder for this object")
        # Ensure get_send_file_max_age is called in all cases.
        # Here, we ensure get_send_file_max_age is called for Blueprints.
        cache_timeout = self.get_send_file_max_age(filename)
        return send_from_directory(
            self.static_folder, filename, cache_timeout=cache_timeout
        )

    def open_resource(self, resource, mode="rb"):
        """Opens a resource from the application's resource folder. To see
        how this works, consider the following folder structure::

            /myapplication.py
            /schema.sql
            /static
                /style.css
            /templates
                /layout.html
                /index.html

        If you want to open the :file:`schema.sql` file you would do the
        following::

            with app.open_resource('schema.sql') as f:
                contents = f.read()
                do_something_with(contents)

        :param resource: the name of the resource. To access resources within
            subfolders use forward slashes as separator.
        :param mode: Open file in this mode. Only reading is supported,
            valid values are "r" (or "rt") and "rb".
        :raises ValueError: if a writing mode is requested.
        """
        if mode not in {"r", "rt", "rb"}:
            raise ValueError("Resources can only be opened for reading")

        return open(os.path.join(self.root_path, resource), mode)
+
+
def total_seconds(td):
    """Returns the total seconds from a timedelta object.

    Only whole days and seconds are counted; the ``microseconds``
    component is discarded, so the result is always an int.

    :param timedelta td: the timedelta to be converted in seconds

    :returns: number of seconds
    :rtype: int
    """
    seconds_per_day = 24 * 60 * 60
    return seconds_per_day * td.days + td.seconds
+
+
def is_ip(value):
    """Determine if the given string is an IP address.

    Python 2 on Windows doesn't provide ``inet_pton``, so this only
    checks IPv4 addresses in that environment.

    :param value: value to check
    :type value: str

    :return: True if string is an IP address
    :rtype: bool
    """
    if PY2 and os.name == "nt":
        # No inet_pton available: inet_aton gives an IPv4-only answer.
        try:
            socket.inet_aton(value)
        except socket.error:
            return False
        return True

    # Try IPv4 first, then IPv6; a successful parse in either family
    # means the string is an IP address.
    for family in (socket.AF_INET, socket.AF_INET6):
        try:
            socket.inet_pton(family, value)
        except socket.error:
            continue
        return True

    return False
diff --git a/matteo_env/Lib/site-packages/flask/json/__init__.py b/matteo_env/Lib/site-packages/flask/json/__init__.py
new file mode 100644
index 0000000..a141068
--- /dev/null
+++ b/matteo_env/Lib/site-packages/flask/json/__init__.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+"""
+flask.json
+~~~~~~~~~~
+
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
+"""
+import codecs
+import io
+import uuid
+from datetime import date
+from datetime import datetime
+
+from itsdangerous import json as _json
+from jinja2 import Markup
+from werkzeug.http import http_date
+
+from .._compat import PY2
+from .._compat import text_type
+from ..globals import current_app
+from ..globals import request
+
+try:
+ import dataclasses
+except ImportError:
+ dataclasses = None
+
+# Figure out if simplejson escapes slashes. This behavior was changed
+# from one version to another without reason.
+_slash_escape = "\\/" not in _json.dumps("/")
+
+
+__all__ = [
+ "dump",
+ "dumps",
+ "load",
+ "loads",
+ "htmlsafe_dump",
+ "htmlsafe_dumps",
+ "JSONDecoder",
+ "JSONEncoder",
+ "jsonify",
+]
+
+
+def _wrap_reader_for_text(fp, encoding):
+ if isinstance(fp.read(0), bytes):
+ fp = io.TextIOWrapper(io.BufferedReader(fp), encoding)
+ return fp
+
+
+def _wrap_writer_for_text(fp, encoding):
+ try:
+ fp.write("")
+ except TypeError:
+ fp = io.TextIOWrapper(fp, encoding)
+ return fp
+
+
class JSONEncoder(_json.JSONEncoder):
    """The default Flask JSON encoder. This one extends the default
    encoder by also supporting ``datetime``, ``UUID``, ``dataclasses``,
    and ``Markup`` objects.

    ``datetime`` objects are serialized as RFC 822 datetime strings.
    This is the same as the HTTP date format.

    In order to support more data types, override the :meth:`default`
    method.
    """

    def default(self, o):
        """Implement this method in a subclass such that it returns a
        serializable object for ``o``, or calls the base implementation (to
        raise a :exc:`TypeError`).

        For example, to support arbitrary iterators, you could implement
        default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        # datetime must be checked before date: datetime is a date
        # subclass and would otherwise lose its time component.
        if isinstance(o, datetime):
            return http_date(o.utctimetuple())
        if isinstance(o, date):
            return http_date(o.timetuple())
        if isinstance(o, uuid.UUID):
            return str(o)
        # dataclasses is None when the module is unavailable (see the
        # guarded import at module top).
        if dataclasses and dataclasses.is_dataclass(o):
            return dataclasses.asdict(o)
        # Markup-like objects (anything exposing __html__) serialize as
        # their HTML text.
        if hasattr(o, "__html__"):
            return text_type(o.__html__())
        # Fall through to the base class, which raises TypeError.
        return _json.JSONEncoder.default(self, o)
+
+
class JSONDecoder(_json.JSONDecoder):
    """The default JSON decoder. This one does not change the behavior from
    the default simplejson decoder. Consult the :mod:`json` documentation
    for more information. This decoder is not only used for the load
    functions of this module but also :attr:`~flask.Request`. It exists
    so applications can subclass and override decoding behavior.
    """
+
+
def _dump_arg_defaults(kwargs, app=None):
    """Inject default arguments for dump functions.

    Mutates ``kwargs`` in place via ``setdefault`` so explicit caller
    arguments always win over app/blueprint configuration.
    """
    if app is None:
        app = current_app

    if app:
        # A blueprint may carry its own encoder that overrides the
        # application-wide one for requests dispatched to it.
        bp = app.blueprints.get(request.blueprint) if request else None
        kwargs.setdefault(
            "cls", bp.json_encoder if bp and bp.json_encoder else app.json_encoder
        )

        if not app.config["JSON_AS_ASCII"]:
            kwargs.setdefault("ensure_ascii", False)

        kwargs.setdefault("sort_keys", app.config["JSON_SORT_KEYS"])
    else:
        # Outside an app context fall back to deterministic defaults.
        kwargs.setdefault("sort_keys", True)
        kwargs.setdefault("cls", JSONEncoder)
+
+
def _load_arg_defaults(kwargs, app=None):
    """Inject default arguments for load functions.

    Mutates ``kwargs`` in place via ``setdefault``; explicit caller
    arguments take precedence over app/blueprint configuration.
    """
    if app is None:
        app = current_app

    if app:
        # Prefer a blueprint-specific decoder when handling a request
        # dispatched to that blueprint.
        bp = app.blueprints.get(request.blueprint) if request else None
        kwargs.setdefault(
            "cls", bp.json_decoder if bp and bp.json_decoder else app.json_decoder
        )
    else:
        kwargs.setdefault("cls", JSONDecoder)
+
+
def detect_encoding(data):
    """Detect which UTF codec was used to encode the given bytes.

    The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is
    accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big
    or little endian. Some editors or libraries may prepend a BOM.
    Only the first four bytes are inspected.

    :param data: Bytes in unknown UTF encoding.
    :return: UTF encoding name
    """
    prefix = data[:4]

    # An explicit UTF-8 BOM wins outright.
    if prefix[:3] == codecs.BOM_UTF8:
        return "utf-8-sig"

    # UTF-8 text never yields NUL bytes here, while the wider encodings
    # always do within the first four bytes.
    if prefix.find(b"\x00") < 0:
        return "utf-8"

    # BOM-marked wide encodings: the codec resolves endianness itself.
    if prefix in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE):
        return "utf-32"
    if prefix[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE):
        return "utf-16"

    # No BOM: infer width and endianness from where the zero bytes sit,
    # assuming the document starts with ASCII characters.
    if len(prefix) == 4:
        if prefix[:3] == b"\x00\x00\x00":
            return "utf-32-be"
        if prefix[::2] == b"\x00\x00":
            return "utf-16-be"
        if prefix[1:] == b"\x00\x00\x00":
            return "utf-32-le"
        if prefix[1::2] == b"\x00\x00":
            return "utf-16-le"
    elif len(prefix) == 2:
        return "utf-16-be" if prefix.startswith(b"\x00") else "utf-16-le"

    return "utf-8"
+
+
def dumps(obj, app=None, **kwargs):
    """Serialize ``obj`` to a JSON-formatted string. If there is an
    app context pushed, use the current app's configured encoder
    (:attr:`~flask.Flask.json_encoder`), or fall back to the default
    :class:`JSONEncoder`.

    Takes the same arguments as the built-in :func:`json.dumps`, and
    does some extra configuration based on the application. If the
    simplejson package is installed, it is preferred.

    :param obj: Object to serialize to JSON.
    :param app: App instance to use to configure the JSON encoder.
        Uses ``current_app`` if not given, and falls back to the default
        encoder when not in an app context.
    :param kwargs: Extra arguments passed to :func:`json.dumps`.

    .. versionchanged:: 1.0.3

        ``app`` can be passed directly, rather than requiring an app
        context for configuration.
    """
    _dump_arg_defaults(kwargs, app=app)
    # The underlying dumps does not understand "encoding"; pop it and
    # apply it to the result afterwards.
    encoding = kwargs.pop("encoding", None)
    rv = _json.dumps(obj, **kwargs)
    if encoding is not None and isinstance(rv, text_type):
        rv = rv.encode(encoding)
    return rv
+
+
def dump(obj, fp, app=None, **kwargs):
    """Like :func:`dumps` but writes into a file object.

    :param obj: Object to serialize to JSON.
    :param fp: File object to write to; wrapped for text output when an
        ``encoding`` keyword is supplied.
    :param app: App instance used to configure the encoder defaults.
    """
    _dump_arg_defaults(kwargs, app=app)
    # Pop "encoding" before delegating; when given, ensure the target
    # accepts text by wrapping binary file objects.
    encoding = kwargs.pop("encoding", None)
    if encoding is not None:
        fp = _wrap_writer_for_text(fp, encoding)
    _json.dump(obj, fp, **kwargs)
+
+
def loads(s, app=None, **kwargs):
    """Deserialize an object from a JSON-formatted string ``s``. If
    there is an app context pushed, use the current app's configured
    decoder (:attr:`~flask.Flask.json_decoder`), or fall back to the
    default :class:`JSONDecoder`.

    Takes the same arguments as the built-in :func:`json.loads`, and
    does some extra configuration based on the application. If the
    simplejson package is installed, it is preferred.

    :param s: JSON string to deserialize.
    :param app: App instance to use to configure the JSON decoder.
        Uses ``current_app`` if not given, and falls back to the default
        encoder when not in an app context.
    :param kwargs: Extra arguments passed to :func:`json.dumps`.

    .. versionchanged:: 1.0.3

        ``app`` can be passed directly, rather than requiring an app
        context for configuration.
    """
    _load_arg_defaults(kwargs, app=app)
    # Bytes input is decoded to text first; when no encoding is given,
    # sniff it from the byte pattern (see detect_encoding).
    if isinstance(s, bytes):
        encoding = kwargs.pop("encoding", None)
        if encoding is None:
            encoding = detect_encoding(s)
        s = s.decode(encoding)
    return _json.loads(s, **kwargs)
+
+
def load(fp, app=None, **kwargs):
    """Like :func:`loads` but reads from a file object.

    :param fp: File object to read from; on Python 3 binary streams are
        wrapped so bytes are decoded (UTF-8 unless ``encoding`` is given).
    :param app: App instance used to configure the decoder defaults.
    """
    _load_arg_defaults(kwargs, app=app)
    if not PY2:
        fp = _wrap_reader_for_text(fp, kwargs.pop("encoding", None) or "utf-8")
    return _json.load(fp, **kwargs)
+
+
+def htmlsafe_dumps(obj, **kwargs):
+ """Works exactly like :func:`dumps` but is safe for use in ``
+
+
+
+
+ ⚾ The Simmadome
+
+
+
+
+
+
+
+