Merge pull request #84 from Sakimori/redacted

website branch is... done. we think.

commit 20e7e80760
@@ -1,7 +1,7 @@
-# matteo-the-prestige
+# simsim discord bot
 
-blaseball, blaseball, is back! in an unofficial capacity.
+blaseball, blaseball, is back! in an unofficial capacity. this is completely unaffiliated with the game band
 
 custom players, custom teams, custom leagues (that last one is coming soon™) all in discord!
 
games.py (22 lines changed)
@@ -107,11 +107,11 @@ class team(object):
         self.slogan = None
 
     def add_lineup(self, new_player):
-        if len(self.lineup) <= 12:
+        if len(self.lineup) < 20:
             self.lineup.append(new_player)
             return (True,)
         else:
-            return (False, "12 players in the lineup, maximum. We're being generous here.")
+            return (False, "20 players in the lineup, maximum. We're being really generous here.")
 
     def set_pitcher(self, new_player):
         self.pitcher = new_player
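The roster cap is reported back as a tuple rather than raised, so whatever command calls add_lineup is expected to branch on the first element and relay the message on failure. A minimal sketch of that calling convention (the handler below is illustrative, not part of this diff):

    def handle_add_command(team, player):
        # add_lineup returns (True,) on success, or (False, "reason") once the 20-player cap is hit
        result = team.add_lineup(player)
        if not result[0]:
            return result[1]  # surface the explanation to the user
        return f"{player.name} added to the lineup."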
@@ -149,6 +149,7 @@ class game(object):
         self.last_update = ({},0) #this is a ({outcome}, runs) tuple
         self.owner = None
         self.ready = False
+        self.victory_lap = False
         if length is not None:
             self.max_innings = length
         else:
@@ -175,9 +176,12 @@ class game(object):
         batter = self.get_batter()
 
         if self.top_of_inning:
-            defender = random.choice(self.teams["home"].lineup)
+            defender_list = self.teams["home"].lineup.copy()
         else:
-            defender = random.choice(self.teams["away"].lineup)
+            defender_list = self.teams["away"].lineup.copy()
+
+        defender_list.append(pitcher)
+        defender = random.choice(defender_list) #make pitchers field
 
         outcome["batter"] = batter
         outcome["defender"] = ""
@@ -530,6 +534,16 @@ class game(object):
             "home_pitcher" : self.teams["home"].pitcher
             }
 
+    def named_bases(self):
+        name_bases = {}
+        for base in range(1,4):
+            if self.bases[base] is not None:
+                name_bases[base] = self.bases[base].name
+            else:
+                name_bases[base] = None
+
+        return name_bases
+
 
     def gamestate_update_full(self):
         attempts = self.thievery_attempts()
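named_bases() gives the web client a plain-names view of the basepaths: keys 1 through 3 map to the occupying runner's name, or None for an empty base. Roughly, the shape it returns looks like this (runner names invented for illustration):

    # runners on first and third, second empty:
    game.named_bases()
    # -> {1: "Runner One", 2: None, 3: "Runner Three"}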
main_controller.py (new file, 129 lines)
@@ -0,0 +1,129 @@
import asyncio, time, datetime, games, json, threading
from flask import Flask, url_for, Response, render_template, request, jsonify
from flask_socketio import SocketIO, emit

app = Flask("the-prestige")
app.config['SECRET KEY'] = 'dev'
socketio = SocketIO(app)

@app.route('/')
def index():
    return render_template("index.html")

@app.route("/gotoboop")
def get_game_states():
    return states_to_send

@socketio.on("recieved")
def handle_new_conn(data):
    socketio.emit("states_update", last_update, room=request.sid)

thread2 = threading.Thread(target=socketio.run,args=(app,))
thread2.start()

master_games_dic = {} #key timestamp : (game game, {} state)
last_update = {}


def update_loop():
    while True:
        states_to_send = {}
        game_times = iter(master_games_dic.copy().keys())
        for game_time in game_times:
            this_game, state, discrim_string = master_games_dic[game_time]
            test_string = this_game.gamestate_display_full()
            print(discrim_string)
            state["leagueoruser"] = discrim_string
            state["display_inning"] = this_game.inning #games need to be initialized with the following keys in state:
            state["outs"] = this_game.outs #away_name
            state["pitcher"] = this_game.get_pitcher().name #home_name
            state["batter"] = this_game.get_batter().name #max_innings
            state["away_score"] = this_game.teams["away"].score #top_of_inning = True
            state["home_score"] = this_game.teams["home"].score #update_pause = 0
            #victory_lap = False
            if test_string == "Game not started.": #weather_emoji
                state["update_emoji"] = "🍿" #weather_text
                state["update_text"] = "Play blall!" #they also need a timestamp
                state["start_delay"] -= 1

            state["display_top_of_inning"] = state["top_of_inning"]

            if state["start_delay"] <= 0:
                if this_game.top_of_inning != state["top_of_inning"]:
                    state["update_pause"] = 2
                    state["pitcher"] = "-"
                    state["batter"] = "-"
                    if not state["top_of_inning"]:
                        state["display_inning"] -= 1
                        state["display_top_of_inning"] = False

                if state["update_pause"] == 1:
                    state["update_emoji"] = "🍿"
                    if this_game.over:
                        state["display_inning"] -= 1
                        state["display_top_of_inning"] = False
                        winning_team = this_game.teams['home'].name if this_game.teams['home'].score > this_game.teams['away'].score else this_game.teams['away'].name
                        if this_game.victory_lap and winning_team == this_game.teams['home'].name:
                            state["update_text"] = f"{winning_team} wins with a victory lap!"
                        elif winning_team == this_game.teams['home'].name:
                            state["update_text"] = f"{winning_team} wins, shaming {this_game.teams['away'].name}!"
                        else:
                            state["update_text"] = f"{winning_team} wins!"
                        state["pitcher"] = "-"
                        state["batter"] = "-"
                    elif this_game.top_of_inning:
                        state["update_text"] = f"Top of {this_game.inning}. {this_game.teams['away'].name} batting!"
                    else:
                        if this_game.inning >= this_game.max_innings:
                            if this_game.teams["home"].score > this_game.teams["away"].score:
                                this_game.victory_lap = True
                        state["update_text"] = f"Bottom of {this_game.inning}. {this_game.teams['home'].name} batting!"

                elif state["update_pause"] != 1 and test_string != "Game not started.":
                    if "steals" in this_game.last_update[0].keys():
                        updatestring = ""
                        for attempt in this_game.last_update[0]["steals"]:
                            updatestring += attempt + "\n"

                        state["update_emoji"] = "💎"
                        state["update_text"] = updatestring

                    else:
                        updatestring = ""
                        punc = ""
                        if this_game.last_update[0]["defender"] != "":
                            punc = ". "

                        if "fc_out" in this_game.last_update[0].keys():
                            name, base_string = this_game.last_update[0]['fc_out']
                            updatestring = f"{this_game.last_update[0]['batter']} {this_game.last_update[0]['text'].value.format(name, base_string)} {this_game.last_update[0]['defender']}{punc}"
                        else:
                            updatestring = f"{this_game.last_update[0]['batter']} {this_game.last_update[0]['text'].value} {this_game.last_update[0]['defender']}{punc}"
                        if this_game.last_update[1] > 0:
                            updatestring += f"{this_game.last_update[1]} runs scored!"

                        state["update_emoji"] = "🏏"
                        state["update_text"] = updatestring

            state["bases"] = this_game.named_bases()

            state["top_of_inning"] = this_game.top_of_inning

            states_to_send[game_time] = state

            if state["update_pause"] <= 1 and state["start_delay"] < 0:
                if this_game.over:
                    state["update_pause"] = 2
                    if state["end_delay"] < 0:
                        master_games_dic.pop(game_time)
                    state["end_delay"] -= 1
                else:
                    this_game.gamestate_update_full()

            state["update_pause"] -= 1

        global last_update
        last_update = states_to_send

        socketio.emit("states_update", states_to_send)
        time.sleep(6)
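Nothing in this file registers games itself; per the inline comments, each entry in master_games_dic is a (game, state, discrim_string) tuple, and the state dict has to arrive with away_name, home_name, max_innings, top_of_inning, update_pause, victory_lap, the weather fields, the delay counters, and a timestamp already set, with update_loop running on its own thread. A rough sketch of that wiring from the bot side, with all names and seed values assumed rather than taken from this PR:

    import threading, time
    import main_controller

    def put_game_on_website(game, discrim_string):
        # seed every key update_loop expects before it starts mutating the state
        ts = time.time()
        state = {
            "away_name": game.teams["away"].name,
            "home_name": game.teams["home"].name,
            "max_innings": game.max_innings,
            "top_of_inning": True,
            "update_pause": 0,
            "victory_lap": False,
            "start_delay": 5,       # assumed: pregame "Play blall!" ticks (one tick per ~6s loop)
            "end_delay": 10,        # assumed: ticks a finished game stays on the page
            "weather_emoji": "🌞",  # assumed placeholder weather
            "weather_text": "Sunny",
            "timestamp": ts,
        }
        main_controller.master_games_dic[ts] = (game, state, discrim_string)

    # one background thread drives every registered game
    threading.Thread(target=main_controller.update_loop, daemon=True).start()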
matteo_env/Lib/site-packages/Flask-1.1.2.dist-info (new files: INSTALLER, LICENSE.rst, METADATA, RECORD, WHEEL, entry_points.txt, top_level.txt)
Vendored wheel metadata for Flask 1.1.2 ("A simple framework for building complex web applications.", BSD-3-Clause, Pallets): the pip INSTALLER marker, the license text, the PyPI METADATA with its requirements (Werkzeug >=0.15, Jinja2 >=2.10.1, itsdangerous >=0.24, click >=5.1), the sha256 RECORD of installed files, the py2/py3 wheel tags, and the "flask = flask.cli:main" console-script entry point.
matteo_env/Lib/site-packages/Flask_SocketIO-5.0.1.dist-info (new files: INSTALLER, LICENSE, METADATA, RECORD, WHEEL, top_level.txt)
Vendored wheel metadata for Flask-SocketIO 5.0.1 ("Socket.IO integration for Flask applications", MIT, Copyright (c) 2014 Miguel Grinberg): the pip INSTALLER marker, the license text, the PyPI METADATA with its requirements (Flask >=0.9, python-socketio >=5.0.2), the sha256 RECORD, and the py2/py3 wheel tags.
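Flask-SocketIO is the piece main_controller.py leans on for pushing game states to the page. As a generic illustration of the library (not code from this PR), a server registers event handlers and emits back to the requesting client like so:

    from flask import Flask
    from flask_socketio import SocketIO, emit

    app = Flask(__name__)
    socketio = SocketIO(app)

    @socketio.on("connect")
    def on_connect():
        # emit() inside a handler targets the client that triggered the event
        emit("states_update", {"hello": "world"})

    if __name__ == "__main__":
        socketio.run(app)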
matteo_env/Lib/site-packages/Jinja2-2.11.2.dist-info (new files: INSTALLER, LICENSE.rst, METADATA, RECORD, WHEEL, entry_points.txt, top_level.txt)
Vendored wheel metadata for Jinja2 2.11.2 ("A very fast and expressive template engine.", BSD-3-Clause, Pallets): the pip INSTALLER marker, the license text, the PyPI METADATA with its requirements (MarkupSafe >=0.23, optional Babel >=0.8 for i18n), the sha256 RECORD, the py2/py3 wheel tags, and the babel.extractors entry point.
matteo_env/Lib/site-packages/MarkupSafe-1.1.1.dist-info (new files: INSTALLER, LICENSE.rst, METADATA, RECORD, WHEEL, top_level.txt)
Vendored wheel metadata for MarkupSafe 1.1.1 ("Safely add untrusted strings to HTML/XML markup.", BSD-3-Clause, Pallets): the pip INSTALLER marker, the license text, the PyPI METADATA, the sha256 RECORD (including the compiled _speedups extension), and the cp38-cp38-win32 wheel tag.
matteo_env/Lib/site-packages/Werkzeug-1.0.1.dist-info (new files: INSTALLER, LICENSE.rst, METADATA, RECORD, WHEEL, top_level.txt)
Vendored wheel metadata for Werkzeug 1.0.1 ("The comprehensive WSGI web application library.", BSD-3-Clause, Pallets): the pip INSTALLER marker, the license text, the PyPI METADATA, the sha256 RECORD of the werkzeug package files, and the py2/py3 wheel tags.
matteo_env/Lib/site-packages/bidict-0.21.2.dist-info (new files: INSTALLER, LICENSE)
Vendored wheel metadata for bidict 0.21.2: the pip INSTALLER marker and the Mozilla Public License Version 2.0 text.
|
||||
modify, display, perform, distribute, and otherwise exploit its
|
||||
Contributions, either on an unmodified basis, with Modifications, or
|
||||
as part of a Larger Work; and
|
||||
|
||||
(b) under Patent Claims of such Contributor to make, use, sell, offer
|
||||
for sale, have made, import, and otherwise transfer either its
|
||||
Contributions or its Contributor Version.
|
||||
|
||||
2.2. Effective Date
|
||||
|
||||
The licenses granted in Section 2.1 with respect to any Contribution
|
||||
become effective for each Contribution on the date the Contributor first
|
||||
distributes such Contribution.
|
||||
|
||||
2.3. Limitations on Grant Scope
|
||||
|
||||
The licenses granted in this Section 2 are the only rights granted under
|
||||
this License. No additional rights or licenses will be implied from the
|
||||
distribution or licensing of Covered Software under this License.
|
||||
Notwithstanding Section 2.1(b) above, no patent license is granted by a
|
||||
Contributor:
|
||||
|
||||
(a) for any code that a Contributor has removed from Covered Software;
|
||||
or
|
||||
|
||||
(b) for infringements caused by: (i) Your and any other third party's
|
||||
modifications of Covered Software, or (ii) the combination of its
|
||||
Contributions with other software (except as part of its Contributor
|
||||
Version); or
|
||||
|
||||
(c) under Patent Claims infringed by Covered Software in the absence of
|
||||
its Contributions.
|
||||
|
||||
This License does not grant any rights in the trademarks, service marks,
|
||||
or logos of any Contributor (except as may be necessary to comply with
|
||||
the notice requirements in Section 3.4).
|
||||
|
||||
2.4. Subsequent Licenses
|
||||
|
||||
No Contributor makes additional grants as a result of Your choice to
|
||||
distribute the Covered Software under a subsequent version of this
|
||||
License (see Section 10.2) or under the terms of a Secondary License (if
|
||||
permitted under the terms of Section 3.3).
|
||||
|
||||
2.5. Representation
|
||||
|
||||
Each Contributor represents that the Contributor believes its
|
||||
Contributions are its original creation(s) or it has sufficient rights
|
||||
to grant the rights to its Contributions conveyed by this License.
|
||||
|
||||
2.6. Fair Use
|
||||
|
||||
This License is not intended to limit any rights You have under
|
||||
applicable copyright doctrines of fair use, fair dealing, or other
|
||||
equivalents.
|
||||
|
||||
2.7. Conditions
|
||||
|
||||
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
|
||||
in Section 2.1.
|
||||
|
||||
3. Responsibilities
|
||||
-------------------
|
||||
|
||||
3.1. Distribution of Source Form
|
||||
|
||||
All distribution of Covered Software in Source Code Form, including any
|
||||
Modifications that You create or to which You contribute, must be under
|
||||
the terms of this License. You must inform recipients that the Source
|
||||
Code Form of the Covered Software is governed by the terms of this
|
||||
License, and how they can obtain a copy of this License. You may not
|
||||
attempt to alter or restrict the recipients' rights in the Source Code
|
||||
Form.
|
||||
|
||||
3.2. Distribution of Executable Form
|
||||
|
||||
If You distribute Covered Software in Executable Form then:
|
||||
|
||||
(a) such Covered Software must also be made available in Source Code
|
||||
Form, as described in Section 3.1, and You must inform recipients of
|
||||
the Executable Form how they can obtain a copy of such Source Code
|
||||
Form by reasonable means in a timely manner, at a charge no more
|
||||
than the cost of distribution to the recipient; and
|
||||
|
||||
(b) You may distribute such Executable Form under the terms of this
|
||||
License, or sublicense it under different terms, provided that the
|
||||
license for the Executable Form does not attempt to limit or alter
|
||||
the recipients' rights in the Source Code Form under this License.
|
||||
|
||||
3.3. Distribution of a Larger Work
|
||||
|
||||
You may create and distribute a Larger Work under terms of Your choice,
|
||||
provided that You also comply with the requirements of this License for
|
||||
the Covered Software. If the Larger Work is a combination of Covered
|
||||
Software with a work governed by one or more Secondary Licenses, and the
|
||||
Covered Software is not Incompatible With Secondary Licenses, this
|
||||
License permits You to additionally distribute such Covered Software
|
||||
under the terms of such Secondary License(s), so that the recipient of
|
||||
the Larger Work may, at their option, further distribute the Covered
|
||||
Software under the terms of either this License or such Secondary
|
||||
License(s).
|
||||
|
||||
3.4. Notices
|
||||
|
||||
You may not remove or alter the substance of any license notices
|
||||
(including copyright notices, patent notices, disclaimers of warranty,
|
||||
or limitations of liability) contained within the Source Code Form of
|
||||
the Covered Software, except that You may alter any license notices to
|
||||
the extent required to remedy known factual inaccuracies.
|
||||
|
||||
3.5. Application of Additional Terms
|
||||
|
||||
You may choose to offer, and to charge a fee for, warranty, support,
|
||||
indemnity or liability obligations to one or more recipients of Covered
|
||||
Software. However, You may do so only on Your own behalf, and not on
|
||||
behalf of any Contributor. You must make it absolutely clear that any
|
||||
such warranty, support, indemnity, or liability obligation is offered by
|
||||
You alone, and You hereby agree to indemnify every Contributor for any
|
||||
liability incurred by such Contributor as a result of warranty, support,
|
||||
indemnity or liability terms You offer. You may include additional
|
||||
disclaimers of warranty and limitations of liability specific to any
|
||||
jurisdiction.
|
||||
|
||||
4. Inability to Comply Due to Statute or Regulation
|
||||
---------------------------------------------------
|
||||
|
||||
If it is impossible for You to comply with any of the terms of this
|
||||
License with respect to some or all of the Covered Software due to
|
||||
statute, judicial order, or regulation then You must: (a) comply with
|
||||
the terms of this License to the maximum extent possible; and (b)
|
||||
describe the limitations and the code they affect. Such description must
|
||||
be placed in a text file included with all distributions of the Covered
|
||||
Software under this License. Except to the extent prohibited by statute
|
||||
or regulation, such description must be sufficiently detailed for a
|
||||
recipient of ordinary skill to be able to understand it.
|
||||
|
||||
5. Termination
|
||||
--------------
|
||||
|
||||
5.1. The rights granted under this License will terminate automatically
|
||||
if You fail to comply with any of its terms. However, if You become
|
||||
compliant, then the rights granted under this License from a particular
|
||||
Contributor are reinstated (a) provisionally, unless and until such
|
||||
Contributor explicitly and finally terminates Your grants, and (b) on an
|
||||
ongoing basis, if such Contributor fails to notify You of the
|
||||
non-compliance by some reasonable means prior to 60 days after You have
|
||||
come back into compliance. Moreover, Your grants from a particular
|
||||
Contributor are reinstated on an ongoing basis if such Contributor
|
||||
notifies You of the non-compliance by some reasonable means, this is the
|
||||
first time You have received notice of non-compliance with this License
|
||||
from such Contributor, and You become compliant prior to 30 days after
|
||||
Your receipt of the notice.
|
||||
|
||||
5.2. If You initiate litigation against any entity by asserting a patent
|
||||
infringement claim (excluding declaratory judgment actions,
|
||||
counter-claims, and cross-claims) alleging that a Contributor Version
|
||||
directly or indirectly infringes any patent, then the rights granted to
|
||||
You by any and all Contributors for the Covered Software under Section
|
||||
2.1 of this License shall terminate.
|
||||
|
||||
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
|
||||
end user license agreements (excluding distributors and resellers) which
|
||||
have been validly granted by You or Your distributors under this License
|
||||
prior to termination shall survive termination.
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 6. Disclaimer of Warranty *
|
||||
* ------------------------- *
|
||||
* *
|
||||
* Covered Software is provided under this License on an "as is" *
|
||||
* basis, without warranty of any kind, either expressed, implied, or *
|
||||
* statutory, including, without limitation, warranties that the *
|
||||
* Covered Software is free of defects, merchantable, fit for a *
|
||||
* particular purpose or non-infringing. The entire risk as to the *
|
||||
* quality and performance of the Covered Software is with You. *
|
||||
* Should any Covered Software prove defective in any respect, You *
|
||||
* (not any Contributor) assume the cost of any necessary servicing, *
|
||||
* repair, or correction. This disclaimer of warranty constitutes an *
|
||||
* essential part of this License. No use of any Covered Software is *
|
||||
* authorized under this License except under this disclaimer. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 7. Limitation of Liability *
|
||||
* -------------------------- *
|
||||
* *
|
||||
* Under no circumstances and under no legal theory, whether tort *
|
||||
* (including negligence), contract, or otherwise, shall any *
|
||||
* Contributor, or anyone who distributes Covered Software as *
|
||||
* permitted above, be liable to You for any direct, indirect, *
|
||||
* special, incidental, or consequential damages of any character *
|
||||
* including, without limitation, damages for lost profits, loss of *
|
||||
* goodwill, work stoppage, computer failure or malfunction, or any *
|
||||
* and all other commercial damages or losses, even if such party *
|
||||
* shall have been informed of the possibility of such damages. This *
|
||||
* limitation of liability shall not apply to liability for death or *
|
||||
* personal injury resulting from such party's negligence to the *
|
||||
* extent applicable law prohibits such limitation. Some *
|
||||
* jurisdictions do not allow the exclusion or limitation of *
|
||||
* incidental or consequential damages, so this exclusion and *
|
||||
* limitation may not apply to You. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
8. Litigation
|
||||
-------------
|
||||
|
||||
Any litigation relating to this License may be brought only in the
|
||||
courts of a jurisdiction where the defendant maintains its principal
|
||||
place of business and such litigation shall be governed by laws of that
|
||||
jurisdiction, without reference to its conflict-of-law provisions.
|
||||
Nothing in this Section shall prevent a party's ability to bring
|
||||
cross-claims or counter-claims.
|
||||
|
||||
9. Miscellaneous
|
||||
----------------
|
||||
|
||||
This License represents the complete agreement concerning the subject
|
||||
matter hereof. If any provision of this License is held to be
|
||||
unenforceable, such provision shall be reformed only to the extent
|
||||
necessary to make it enforceable. Any law or regulation which provides
|
||||
that the language of a contract shall be construed against the drafter
|
||||
shall not be used to construe this License against a Contributor.
|
||||
|
||||
10. Versions of the License
|
||||
---------------------------
|
||||
|
||||
10.1. New Versions
|
||||
|
||||
Mozilla Foundation is the license steward. Except as provided in Section
|
||||
10.3, no one other than the license steward has the right to modify or
|
||||
publish new versions of this License. Each version will be given a
|
||||
distinguishing version number.
|
||||
|
||||
10.2. Effect of New Versions
|
||||
|
||||
You may distribute the Covered Software under the terms of the version
|
||||
of the License under which You originally received the Covered Software,
|
||||
or under the terms of any subsequent version published by the license
|
||||
steward.
|
||||
|
||||
10.3. Modified Versions
|
||||
|
||||
If you create software not governed by this License, and you want to
|
||||
create a new license for such software, you may create and use a
|
||||
modified version of this License if you rename the license and remove
|
||||
any references to the name of the license steward (except to note that
|
||||
such modified license differs from this License).
|
||||
|
||||
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
||||
Licenses
|
||||
|
||||
If You choose to distribute Source Code Form that is Incompatible With
|
||||
Secondary Licenses under the terms of this version of the License, the
|
||||
notice described in Exhibit B of this License must be attached.
|
||||
|
||||
Exhibit A - Source Code Form License Notice
|
||||
-------------------------------------------
|
||||
|
||||
This Source Code Form is subject to the terms of the Mozilla Public
|
||||
License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
If it is not possible or desirable to put the notice in a particular
|
||||
file, then You may include the notice in a location (such as a LICENSE
|
||||
file in a relevant directory) where a recipient would be likely to look
|
||||
for such a notice.
|
||||
|
||||
You may add additional accurate notices of copyright ownership.
|
||||
|
||||
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
||||
---------------------------------------------------------
|
||||
|
||||
This Source Code Form is "Incompatible With Secondary Licenses", as
|
||||
defined by the Mozilla Public License, v. 2.0.
|
334
matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/METADATA
Normal file
@ -0,0 +1,334 @@
|
|||
Metadata-Version: 2.1
|
||||
Name: bidict
|
||||
Version: 0.21.2
|
||||
Summary: The bidirectional mapping library for Python.
|
||||
Home-page: https://bidict.readthedocs.io
|
||||
Author: Joshua Bronson
|
||||
Author-email: jabronson@gmail.com
|
||||
License: MPL 2.0
|
||||
Keywords: dict dictionary mapping datastructure bimap bijection bijective injective inverse reverse bidirectional two-way 2-way
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 4 - Beta
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Requires-Python: >=3.6
|
||||
Description-Content-Type: text/x-rst
|
||||
Provides-Extra: coverage
|
||||
Requires-Dist: coverage (<6) ; extra == 'coverage'
|
||||
Requires-Dist: pytest-cov (<3) ; extra == 'coverage'
|
||||
Provides-Extra: dev
|
||||
Requires-Dist: setuptools-scm ; extra == 'dev'
|
||||
Requires-Dist: hypothesis (<6) ; extra == 'dev'
|
||||
Requires-Dist: py (<2) ; extra == 'dev'
|
||||
Requires-Dist: pytest (<7) ; extra == 'dev'
|
||||
Requires-Dist: pytest-benchmark (<4,>=3.2.0) ; extra == 'dev'
|
||||
Requires-Dist: sortedcollections (<2) ; extra == 'dev'
|
||||
Requires-Dist: sortedcontainers (<3) ; extra == 'dev'
|
||||
Requires-Dist: Sphinx (<4) ; extra == 'dev'
|
||||
Requires-Dist: sphinx-autodoc-typehints (<2) ; extra == 'dev'
|
||||
Requires-Dist: coverage (<6) ; extra == 'dev'
|
||||
Requires-Dist: pytest-cov (<3) ; extra == 'dev'
|
||||
Requires-Dist: pre-commit (<3) ; extra == 'dev'
|
||||
Requires-Dist: tox (<4) ; extra == 'dev'
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: Sphinx (<4) ; extra == 'docs'
|
||||
Requires-Dist: sphinx-autodoc-typehints (<2) ; extra == 'docs'
|
||||
Provides-Extra: precommit
|
||||
Requires-Dist: pre-commit (<3) ; extra == 'precommit'
|
||||
Provides-Extra: test
|
||||
Requires-Dist: hypothesis (<6) ; extra == 'test'
|
||||
Requires-Dist: py (<2) ; extra == 'test'
|
||||
Requires-Dist: pytest (<7) ; extra == 'test'
|
||||
Requires-Dist: pytest-benchmark (<4,>=3.2.0) ; extra == 'test'
|
||||
Requires-Dist: sortedcollections (<2) ; extra == 'test'
|
||||
Requires-Dist: sortedcontainers (<3) ; extra == 'test'
|
||||
Requires-Dist: Sphinx (<4) ; extra == 'test'
|
||||
Requires-Dist: sphinx-autodoc-typehints (<2) ; extra == 'test'
|
||||
|
||||
.. Forward declarations for all the custom interpreted text roles that
|
||||
Sphinx defines and that are used below. This helps Sphinx-unaware tools
|
||||
(e.g. rst2html, PyPI's and GitHub's renderers, etc.).
|
||||
.. role:: doc
|
||||
|
||||
.. Use :doc: rather than :ref: references below for better interop as well.
|
||||
|
||||
|
||||
``bidict``
|
||||
==========
|
||||
|
||||
The bidirectional mapping library for Python.
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/jab/bidict/master/assets/logo-sm.png
|
||||
:target: https://bidict.readthedocs.io/
|
||||
:alt: bidict logo
|
||||
|
||||
|
||||
Status
|
||||
------
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/bidict.svg
|
||||
:target: https://pypi.org/project/bidict
|
||||
:alt: Latest release
|
||||
|
||||
.. image:: https://img.shields.io/readthedocs/bidict/master.svg
|
||||
:target: https://bidict.readthedocs.io/en/master/
|
||||
:alt: Documentation
|
||||
|
||||
.. image:: https://api.travis-ci.org/jab/bidict.svg?branch=master
|
||||
:target: https://travis-ci.org/jab/bidict
|
||||
:alt: Travis-CI build status
|
||||
|
||||
.. image:: https://codecov.io/gh/jab/bidict/branch/master/graph/badge.svg
|
||||
:target: https://codecov.io/gh/jab/bidict
|
||||
:alt: Test coverage
|
||||
|
||||
.. Hide to reduce clutter
|
||||
.. image:: https://img.shields.io/lgtm/alerts/github/jab/bidict.svg
|
||||
:target: https://lgtm.com/projects/g/jab/bidict/
|
||||
:alt: LGTM alerts
|
||||
.. image:: https://bestpractices.coreinfrastructure.org/projects/2354/badge
|
||||
:target: https://bestpractices.coreinfrastructure.org/en/projects/2354
|
||||
:alt: CII best practices badge
|
||||
.. image:: https://img.shields.io/badge/tidelift-pro%20support-orange.svg
|
||||
:target: https://tidelift.com/subscription/pkg/pypi-bidict?utm_source=pypi-bidict&utm_medium=referral&utm_campaign=docs
|
||||
:alt: Paid support available via Tidelift
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/gk133415udncwto3/branch/master?svg=true
|
||||
:target: https://ci.appveyor.com/project/jab/bidict
|
||||
:alt: AppVeyor (Windows) build status
|
||||
.. image:: https://img.shields.io/pypi/pyversions/bidict.svg
|
||||
:target: https://pypi.org/project/bidict
|
||||
:alt: Supported Python versions
|
||||
.. image:: https://img.shields.io/pypi/implementation/bidict.svg
|
||||
:target: https://pypi.org/project/bidict
|
||||
:alt: Supported Python implementations
|
||||
|
||||
.. image:: https://img.shields.io/pypi/l/bidict.svg
|
||||
:target: https://raw.githubusercontent.com/jab/bidict/master/LICENSE
|
||||
:alt: License
|
||||
|
||||
.. image:: https://static.pepy.tech/badge/bidict
|
||||
:target: https://pepy.tech/project/bidict
|
||||
:alt: PyPI Downloads
|
||||
|
||||
|
||||
``bidict``:
|
||||
^^^^^^^^^^^
|
||||
|
||||
- has been used for many years by several teams at
|
||||
**Google, Venmo, CERN, Bank of America Merrill Lynch, Bloomberg, Two Sigma,** and many others
|
||||
- has carefully designed APIs for
|
||||
**safety, simplicity, flexibility, and ergonomics**
|
||||
- is **fast, lightweight, and has no runtime dependencies** other than Python's standard library
|
||||
- **integrates natively** with Python’s ``collections.abc`` interfaces
|
||||
- provides **type hints** for all public APIs
|
||||
- is implemented in **concise, well-factored, pure (PyPy-compatible) Python code**
|
||||
that is **optimized for running efficiently**
|
||||
as well as for **reading and learning** [#fn-learning]_
|
||||
- has **extensive docs and test coverage**
|
||||
(including property-based tests and benchmarks)
|
||||
run continuously on all supported Python versions
|
||||
|
||||
|
||||
Note: Python 3 Required
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
As promised in the 0.18.2 release (see :doc:`changelog` [#fn-changelog]_),
|
||||
**Python 2 is no longer supported**.
|
||||
Version 0.18.3
|
||||
is the last release of ``bidict`` that supports Python 2.
|
||||
This makes ``bidict`` more efficient on Python 3
|
||||
and enables further improvement to bidict in the future.
|
||||
See `python3statement.org <https://python3statement.org>`__
|
||||
for more info.
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
``pip install bidict``
|
||||
|
||||
|
||||
Quick Start
|
||||
-----------
|
||||
|
||||
.. code:: python
|
||||
|
||||
>>> from bidict import bidict
|
||||
>>> element_by_symbol = bidict({'H': 'hydrogen'})
|
||||
>>> element_by_symbol['H']
|
||||
'hydrogen'
|
||||
>>> element_by_symbol.inverse['hydrogen']
|
||||
'H'
|
||||
|
||||
|
||||
For more usage documentation,
|
||||
head to the :doc:`intro` [#fn-intro]_
|
||||
and proceed from there.
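As an illustrative sketch (an editorial addition, not part of the upstream README): because a ``bidict`` keeps its inverse in sync automatically, an update made through either side is visible from both. Assuming only the API shown in the Quick Start above:

.. code:: python

   >>> from bidict import bidict
   >>> element_by_symbol = bidict({'H': 'hydrogen'})
   >>> element_by_symbol['He'] = 'helium'            # update the forward mapping
   >>> element_by_symbol.inverse['helium']           # the inverse reflects it
   'He'
   >>> element_by_symbol.inverse['lithium'] = 'Li'   # updating the inverse also works
   >>> element_by_symbol['Li']
   'lithium'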
|
||||
|
||||
|
||||
Community Support
|
||||
-----------------
|
||||
|
||||
.. image:: https://img.shields.io/badge/chat-on%20gitter-5AB999.svg?logo=gitter-white
|
||||
:target: https://gitter.im/jab/bidict
|
||||
:alt: Chat
|
||||
|
||||
If you are thinking of using ``bidict`` in your work,
|
||||
or if you have any questions, comments, or suggestions,
|
||||
I'd love to know about your use case
|
||||
and provide as much voluntary support for it as possible.
|
||||
|
||||
Please feel free to leave a message in the
|
||||
`chatroom <https://gitter.im/jab/bidict>`__
|
||||
or open a new issue on GitHub.
|
||||
You can search through
|
||||
`existing issues <https://github.com/jab/bidict/issues>`__
|
||||
before creating a new one
|
||||
in case your questions or concerns have been addressed there already.
|
||||
|
||||
|
||||
Enterprise-Grade Support via Tidelift
|
||||
-------------------------------------
|
||||
|
||||
.. image:: https://img.shields.io/badge/tidelift-pro%20support-orange.svg
|
||||
:target: https://tidelift.com/subscription/pkg/pypi-bidict?utm_source=pypi-bidict&utm_medium=referral&utm_campaign=readme
|
||||
:alt: Paid support available via Tidelift
|
||||
|
||||
If your use case requires a greater level of support,
|
||||
enterprise-grade support for ``bidict`` can be obtained via the
|
||||
`Tidelift subscription <https://tidelift.com/subscription/pkg/pypi-bidict?utm_source=pypi-bidict&utm_medium=referral&utm_campaign=readme>`__.
|
||||
|
||||
|
||||
Notice of Usage
|
||||
---------------
|
||||
|
||||
If you use ``bidict``,
|
||||
and especially if your usage or your organization is significant in some way,
|
||||
please let me know.
|
||||
|
||||
You can:
|
||||
|
||||
- `star bidict on GitHub <https://github.com/jab/bidict>`__
|
||||
- `create an issue <https://github.com/jab/bidict/issues/new?title=Notice+of+Usage&body=I+am+using+bidict+for...>`__
|
||||
- leave a message in the `chat room <https://gitter.im/jab/bidict>`__
|
||||
- `email me <mailto:jabronson@gmail.com?subject=bidict&body=I%20am%20using%20bidict%20for...>`__
|
||||
|
||||
|
||||
Changelog
|
||||
---------
|
||||
|
||||
See the :doc:`changelog` [#fn-changelog]_
|
||||
for a history of notable changes to ``bidict``.
|
||||
|
||||
|
||||
Release Notifications
|
||||
---------------------
|
||||
|
||||
.. duplicated in CHANGELOG.rst:
|
||||
(would use `.. include::` but GitHub doesn't understand it)
|
||||
|
||||
.. image:: https://img.shields.io/badge/libraries.io-subscribe-5BC0DF.svg
|
||||
:target: https://libraries.io/pypi/bidict
|
||||
:alt: Follow on libraries.io
|
||||
|
||||
Subscribe to releases
|
||||
`on GitHub <https://github.blog/changelog/2018-11-27-watch-releases/>`__ or
|
||||
`libraries.io <https://libraries.io/pypi/bidict>`__
|
||||
to be notified when new versions of ``bidict`` are released.
|
||||
|
||||
|
||||
Learning from ``bidict``
|
||||
------------------------
|
||||
|
||||
One of the best things about ``bidict``
|
||||
is that it touches a surprising number of
|
||||
interesting Python corners,
|
||||
especially given its small size and scope.
|
||||
|
||||
Check out :doc:`learning-from-bidict` [#fn-learning]_
|
||||
if you're interested in learning more.
|
||||
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
``bidict`` is currently a one-person operation
|
||||
maintained on a voluntary basis.
|
||||
|
||||
Your help would be most welcome!
|
||||
|
||||
|
||||
Reviewers Wanted!
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
One of the most valuable ways to contribute to ``bidict`` –
|
||||
and to explore some interesting Python corners [#fn-learning]_
|
||||
while you're at it –
|
||||
is to review the relatively small codebase.
|
||||
|
||||
Please create an issue or pull request with any improvements you'd propose
|
||||
or any other results you found.
|
||||
Submitting a `draft PR <https://github.blog/2019-02-14-introducing-draft-pull-requests/>`__
|
||||
with feedback in inline code comments, or a
|
||||
`"Review results" issue <https://github.com/jab/bidict/issues/new?title=Review+results>`__,
|
||||
would each work well.
|
||||
|
||||
You can also
|
||||
+1 `this issue <https://github.com/jab/bidict/issues/63>`__
|
||||
to sign up to give feedback on future proposed changes
|
||||
that are in need of a reviewer.
|
||||
|
||||
|
||||
Giving Back
|
||||
^^^^^^^^^^^
|
||||
|
||||
.. duplicated in CONTRIBUTING.rst
|
||||
(would use `.. include::` but GitHub doesn't understand it)
|
||||
|
||||
``bidict`` is the product of hundreds of hours of unpaid, voluntary work.
|
||||
|
||||
If ``bidict`` has helped you accomplish your work,
|
||||
especially work you've been paid for,
|
||||
please consider chipping in toward the costs
|
||||
of its maintenance and development
|
||||
and/or ask your organization to do the same.
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/jab/bidict/master/assets/support-on-gumroad.png
|
||||
:target: https://gumroad.com/l/bidict
|
||||
:alt: Support bidict
|
||||
|
||||
|
||||
Finding Documentation
|
||||
---------------------
|
||||
|
||||
If you're viewing this on `<https://bidict.readthedocs.io>`__,
|
||||
note that multiple versions of the documentation are available,
|
||||
and you can choose a different version using the popup menu at the bottom-right.
|
||||
Please make sure you're viewing the version of the documentation
|
||||
that corresponds to the version of ``bidict`` you'd like to use.
|
||||
|
||||
If you're viewing this on GitHub, PyPI, or some other place
|
||||
that can't render and link this documentation properly
|
||||
and are seeing broken links,
|
||||
try these alternate links instead:
|
||||
|
||||
.. [#fn-learning] `<docs/learning-from-bidict.rst>`__ | `<https://bidict.readthedocs.io/learning-from-bidict.html>`__
|
||||
|
||||
.. [#fn-changelog] `<CHANGELOG.rst>`__ | `<https://bidict.readthedocs.io/changelog.html>`__
|
||||
|
||||
.. [#fn-intro] `<docs/intro.rst>`__ | `<https://bidict.readthedocs.io/intro.html>`__
|
||||
|
||||
|
||||
----
|
||||
|
||||
Next: :doc:`intro` [#fn-intro]_
|
||||
|
||||
|
41
matteo_env/Lib/site-packages/bidict-0.21.2.dist-info/RECORD
Normal file
@ -0,0 +1,41 @@
bidict-0.21.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
bidict-0.21.2.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
bidict-0.21.2.dist-info/METADATA,sha256=6p33oEnK6iIEBM4o7wQLGPUyeYHtc-yEW6_s05N3d5c,11630
bidict-0.21.2.dist-info/RECORD,,
bidict-0.21.2.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
bidict-0.21.2.dist-info/top_level.txt,sha256=WuQO02jp0ODioS7sJoaHg3JJ5_3h6Sxo9RITvNGPYmc,7
bidict/__init__.py,sha256=A2ZUK4jTHNN6T3QUaSh7xuIwc-Ytgw6gVLHNx07D7Fo,3910
bidict/__pycache__/__init__.cpython-38.pyc,,
bidict/__pycache__/_abc.cpython-38.pyc,,
bidict/__pycache__/_base.cpython-38.pyc,,
bidict/__pycache__/_bidict.cpython-38.pyc,,
bidict/__pycache__/_delegating.cpython-38.pyc,,
bidict/__pycache__/_dup.cpython-38.pyc,,
bidict/__pycache__/_exc.cpython-38.pyc,,
bidict/__pycache__/_frozenbidict.cpython-38.pyc,,
bidict/__pycache__/_frozenordered.cpython-38.pyc,,
bidict/__pycache__/_iter.cpython-38.pyc,,
bidict/__pycache__/_mut.cpython-38.pyc,,
bidict/__pycache__/_named.cpython-38.pyc,,
bidict/__pycache__/_orderedbase.cpython-38.pyc,,
bidict/__pycache__/_orderedbidict.cpython-38.pyc,,
bidict/__pycache__/_typing.cpython-38.pyc,,
bidict/__pycache__/_version.cpython-38.pyc,,
bidict/__pycache__/metadata.cpython-38.pyc,,
bidict/_abc.py,sha256=irEWsolFCp8ps77OKmWwB0gTrpXc5be0RBdHaQoPybk,4626
bidict/_base.py,sha256=k7oLFwb_6ZMHMhfI217hnM-WfJ4oxVMTol1BG14E3cA,16180
bidict/_bidict.py,sha256=85G1TyWeMZLE70HK-qwCVug-bCdaI3bIeoBxJzwSkkQ,2005
bidict/_delegating.py,sha256=UibZewwgmN8iBECZtjELwKl5zhcuxYnyy2gsiAXBe3c,1313
bidict/_dup.py,sha256=j0DSseguIdCgAhqxm0Zn2887110zx70F19Lvw7hiayg,1819
bidict/_exc.py,sha256=nKOGqxqOvyjheh-Pgo-dZZWRRvPEWYyD8Ukm5XR8WNk,1053
bidict/_frozenbidict.py,sha256=IYMIzsm9pAXTS819Tw7z_VTLIEZir4oLJbrcRc5yFP8,2494
bidict/_frozenordered.py,sha256=E4kzBIoriZLuth9I1ll57KelvUN_xDAvZjQH7GNdn30,3224
bidict/_iter.py,sha256=F9zoHs-IrkucujbRGnMJslH_Gc_Qrla4Mk1sOvn7ELg,2333
bidict/_mut.py,sha256=MBXzglmeNJniRbdZ1C0Tx14pcsaBdi1NPaaFGIzZEpg,7352
bidict/_named.py,sha256=_WQjoz9pE1d_HwVQX05vn5TthOREOw49yDdFSs5lvU4,3784
bidict/_orderedbase.py,sha256=yMIRfDtY5DQJoAeI5YvIW49O42MuKqK8qxDrczr1NQY,12196
bidict/_orderedbidict.py,sha256=tkfAMxehLetMqTrGoQq9KfdOpgRdhzWqp2lmk6_4vL0,3409
bidict/_typing.py,sha256=3lq-wZhWGyn3q7euw6YK7LwFnxOVB1qdqX1x1HcW4Ng,862
bidict/_version.py,sha256=e4Wu3F4t-gj1TaiLYadYEQ_3R8pNGz4Xi1K4eN1WFIw,117
bidict/metadata.py,sha256=htEXequ7kpMnWeRKrl4cUJZBQIbBegxgu_bxFZ0pIkY,1812
bidict/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.35.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

@ -0,0 +1 @@
bidict

94
matteo_env/Lib/site-packages/bidict/__init__.py
Normal file
@ -0,0 +1,94 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# * Welcome to the bidict source code *
|
||||
#==============================================================================
|
||||
|
||||
# Doing a code review? You'll find a "Code review nav" comment like the one
|
||||
# below at the top and bottom of the most important source files. This provides
|
||||
# a suggested initial path through the source when reviewing.
|
||||
#
|
||||
# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
|
||||
# viewing an outdated version of the code. Please head to GitHub to review the
|
||||
# latest version, which contains important improvements over older versions.
|
||||
#
|
||||
# Thank you for reading and for any feedback you provide.
|
||||
|
||||
# * Code review nav *
|
||||
#==============================================================================
|
||||
# Current: __init__.py Next: _abc.py →
|
||||
#==============================================================================
|
||||
|
||||
|
||||
"""The bidirectional mapping library for Python.
|
||||
|
||||
bidict by example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
>>> from bidict import bidict
|
||||
>>> element_by_symbol = bidict({'H': 'hydrogen'})
|
||||
>>> element_by_symbol['H']
|
||||
'hydrogen'
|
||||
>>> element_by_symbol.inverse['hydrogen']
|
||||
'H'
|
||||
|
||||
|
||||
Please see https://github.com/jab/bidict for the most up-to-date code and
|
||||
https://bidict.readthedocs.io for the most up-to-date documentation
|
||||
if you are reading this elsewhere.
|
||||
|
||||
|
||||
.. :copyright: (c) 2009-2020 Joshua Bronson.
|
||||
.. :license: MPLv2. See LICENSE for details.
|
||||
"""
|
||||
|
||||
# Use private aliases to not re-export these publicly (for Sphinx automodule with imported-members).
|
||||
from sys import version_info as _version_info
|
||||
|
||||
|
||||
if _version_info < (3, 6): # pragma: no cover
|
||||
raise ImportError('Python 3.6+ is required.')
|
||||
|
||||
# The rest of this file only collects functionality implemented in the rest of the
|
||||
# source for the purposes of exporting it under the `bidict` module namespace.
|
||||
# flake8: noqa: F401 (imported but unused)
|
||||
from ._abc import BidirectionalMapping, MutableBidirectionalMapping
|
||||
from ._base import BidictBase
|
||||
from ._mut import MutableBidict
|
||||
from ._bidict import bidict
|
||||
from ._frozenbidict import frozenbidict
|
||||
from ._frozenordered import FrozenOrderedBidict
|
||||
from ._named import namedbidict
|
||||
from ._orderedbase import OrderedBidictBase
|
||||
from ._orderedbidict import OrderedBidict
|
||||
from ._dup import ON_DUP_DEFAULT, ON_DUP_RAISE, ON_DUP_DROP_OLD, RAISE, DROP_OLD, DROP_NEW, OnDup, OnDupAction
|
||||
from ._exc import BidictException, DuplicationError, KeyDuplicationError, ValueDuplicationError, KeyAndValueDuplicationError
|
||||
from ._iter import inverted
|
||||
from .metadata import (
|
||||
__author__, __maintainer__, __copyright__, __email__, __credits__, __url__,
|
||||
__license__, __status__, __description__, __keywords__, __version__, __version_info__,
|
||||
)
|
||||
|
||||
# Set __module__ of re-exported classes to the 'bidict' top-level module name
|
||||
# so that private/internal submodules are not exposed to users e.g. in repr strings.
|
||||
_locals = tuple(locals().items())
|
||||
for _name, _obj in _locals: # pragma: no cover
|
||||
if not getattr(_obj, '__module__', '').startswith('bidict.'):
|
||||
continue
|
||||
try:
|
||||
_obj.__module__ = 'bidict'
|
||||
except AttributeError as exc: # raised when __module__ is read-only (as in OnDup)
|
||||
pass
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
#==============================================================================
|
||||
# Current: __init__.py Next: _abc.py →
|
||||
#==============================================================================
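# Editor's note (illustrative addition, not part of the bidict source): the
# re-exports collected above are what make the whole public API importable
# straight from the top-level package, e.g.:
#
#     >>> from bidict import bidict, OrderedBidict, frozenbidict, inverted
#     >>> list(inverted({'H': 'hydrogen'}.items()))
#     [('hydrogen', 'H')]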
|
105
matteo_env/Lib/site-packages/bidict/_abc.py
Normal file
@ -0,0 +1,105 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# * Welcome to the bidict source code *
|
||||
#==============================================================================
|
||||
|
||||
# Doing a code review? You'll find a "Code review nav" comment like the one
|
||||
# below at the top and bottom of the most important source files. This provides
|
||||
# a suggested initial path through the source when reviewing.
|
||||
#
|
||||
# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
|
||||
# viewing an outdated version of the code. Please head to GitHub to review the
|
||||
# latest version, which contains important improvements over older versions.
|
||||
#
|
||||
# Thank you for reading and for any feedback you provide.
|
||||
|
||||
# * Code review nav *
|
||||
#==============================================================================
|
||||
# ← Prev: __init__.py Current: _abc.py Next: _base.py →
|
||||
#==============================================================================
|
||||
|
||||
|
||||
"""Provide the :class:`BidirectionalMapping` abstract base class."""
|
||||
|
||||
import typing as _t
|
||||
from abc import abstractmethod
|
||||
|
||||
from ._typing import KT, VT
|
||||
|
||||
|
||||
class BidirectionalMapping(_t.Mapping[KT, VT]):
|
||||
"""Abstract base class (ABC) for bidirectional mapping types.
|
||||
|
||||
Extends :class:`collections.abc.Mapping` primarily by adding the
|
||||
(abstract) :attr:`inverse` property,
|
||||
which implementors of :class:`BidirectionalMapping`
|
||||
should override to return a reference to the inverse
|
||||
:class:`BidirectionalMapping` instance.
|
||||
"""
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def inverse(self) -> 'BidirectionalMapping[VT, KT]':
|
||||
"""The inverse of this bidirectional mapping instance.
|
||||
|
||||
*See also* :attr:`bidict.BidictBase.inverse`, :attr:`bidict.BidictBase.inv`
|
||||
|
||||
:raises NotImplementedError: Meant to be overridden in subclasses.
|
||||
"""
|
||||
# The @abstractproperty decorator prevents BidirectionalMapping subclasses from being
|
||||
# instantiated unless they override this method. So users shouldn't be able to get to the
|
||||
# point where they can unintentionally call this implementation of .inverse on something
|
||||
# anyway. Could leave the method body empty, but raise NotImplementedError so it's extra
|
||||
# clear there's no reason to call this implementation (e.g. via super() after overriding).
|
||||
raise NotImplementedError
|
||||
|
||||
def __inverted__(self) -> _t.Iterator[_t.Tuple[VT, KT]]:
|
||||
"""Get an iterator over the items in :attr:`inverse`.
|
||||
|
||||
This is functionally equivalent to iterating over the items in the
|
||||
forward mapping and inverting each one on the fly, but this provides a
|
||||
more efficient implementation: Assuming the already-inverted items
|
||||
are stored in :attr:`inverse`, just return an iterator over them directly.
|
||||
|
||||
Providing this default implementation enables external functions,
|
||||
particularly :func:`~bidict.inverted`, to use this optimized
|
||||
implementation when available, instead of having to invert on the fly.
|
||||
|
||||
*See also* :func:`bidict.inverted`
|
||||
"""
|
||||
return iter(self.inverse.items())
|
||||
|
||||
def values(self) -> _t.AbstractSet[VT]: # type: ignore # https://github.com/python/typeshed/issues/4435
|
||||
"""A set-like object providing a view on the contained values.
|
||||
|
||||
Override the implementation inherited from
|
||||
:class:`~collections.abc.Mapping`.
|
||||
Because the values of a :class:`~bidict.BidirectionalMapping`
|
||||
are the keys of its inverse,
|
||||
this returns a :class:`~collections.abc.KeysView`
|
||||
rather than a :class:`~collections.abc.ValuesView`,
|
||||
which has the advantages of constant-time containment checks
|
||||
and supporting set operations.
|
||||
"""
|
||||
return self.inverse.keys()
|
||||
|
||||
|
||||
class MutableBidirectionalMapping(BidirectionalMapping[KT, VT], _t.MutableMapping[KT, VT]):
|
||||
"""Abstract base class (ABC) for mutable bidirectional mapping types."""
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
#==============================================================================
|
||||
# ← Prev: __init__.py Current: _abc.py Next: _base.py →
|
||||
#==============================================================================
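# Editor's note (illustrative addition, not part of the bidict source): the
# values() override documented above returns the inverse mapping's keys view,
# so value lookups are constant-time and set operations work, e.g.:
#
#     >>> from bidict import bidict
#     >>> b = bidict({'H': 'hydrogen', 'He': 'helium'})
#     >>> 'helium' in b.values()            # O(1), backed by the inverse's keys
#     True
#     >>> b.values() & {'helium', 'neon'}   # set operations are supported
#     {'helium'}
#     >>> list(b.__inverted__())            # iterate the already-inverted items
#     [('hydrogen', 'H'), ('helium', 'He')]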
|
383
matteo_env/Lib/site-packages/bidict/_base.py
Normal file
@ -0,0 +1,383 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# * Welcome to the bidict source code *
|
||||
#==============================================================================
|
||||
|
||||
# Doing a code review? You'll find a "Code review nav" comment like the one
|
||||
# below at the top and bottom of the most important source files. This provides
|
||||
# a suggested initial path through the source when reviewing.
|
||||
#
|
||||
# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
|
||||
# viewing an outdated version of the code. Please head to GitHub to review the
|
||||
# latest version, which contains important improvements over older versions.
|
||||
#
|
||||
# Thank you for reading and for any feedback you provide.
|
||||
|
||||
# * Code review nav *
|
||||
#==============================================================================
|
||||
# ← Prev: _abc.py Current: _base.py Next: _frozenbidict.py →
|
||||
#==============================================================================
|
||||
|
||||
|
||||
"""Provide :class:`BidictBase`."""
|
||||
|
||||
import typing as _t
|
||||
from collections import namedtuple
|
||||
from copy import copy
|
||||
from weakref import ref
|
||||
|
||||
from ._abc import BidirectionalMapping
|
||||
from ._dup import ON_DUP_DEFAULT, RAISE, DROP_OLD, DROP_NEW, OnDup
|
||||
from ._exc import DuplicationError, KeyDuplicationError, ValueDuplicationError, KeyAndValueDuplicationError
|
||||
from ._iter import _iteritems_args_kw
|
||||
from ._typing import _NONE, KT, VT, OKT, OVT, IterItems, MapOrIterItems
|
||||
|
||||
|
||||
_WriteResult = namedtuple('_WriteResult', 'key val oldkey oldval')
|
||||
_DedupResult = namedtuple('_DedupResult', 'isdupkey isdupval invbyval fwdbykey')
|
||||
_NODUP = _DedupResult(False, False, _NONE, _NONE)
|
||||
|
||||
BT = _t.TypeVar('BT', bound='BidictBase') # typevar for BidictBase.copy
|
||||
|
||||
|
||||
class BidictBase(BidirectionalMapping[KT, VT]):
|
||||
"""Base class implementing :class:`BidirectionalMapping`."""
|
||||
|
||||
__slots__ = ['_fwdm', '_invm', '_inv', '_invweak', '_hash', '__weakref__']
|
||||
|
||||
#: The default :class:`~bidict.OnDup`
|
||||
#: that governs behavior when a provided item
|
||||
#: duplicates the key or value of other item(s).
|
||||
#:
|
||||
#: *See also* :ref:`basic-usage:Values Must Be Unique`, :doc:`extending`
|
||||
on_dup = ON_DUP_DEFAULT
|
||||
|
||||
_fwdm_cls = dict #: class of the backing forward mapping
|
||||
_invm_cls = dict #: class of the backing inverse mapping
|
||||
|
||||
#: The object used by :meth:`__repr__` for printing the contained items.
|
||||
_repr_delegate = dict
|
||||
|
||||
def __init_subclass__(cls, **kw):
|
||||
super().__init_subclass__(**kw)
|
||||
# Compute and set _inv_cls, the inverse of this bidict class.
|
||||
if '_inv_cls' in cls.__dict__:
|
||||
return
|
||||
if cls._fwdm_cls is cls._invm_cls:
|
||||
cls._inv_cls = cls
|
||||
return
|
||||
inv_cls = type(cls.__name__ + 'Inv', cls.__bases__, {
|
||||
**cls.__dict__,
|
||||
'_inv_cls': cls,
|
||||
'_fwdm_cls': cls._invm_cls,
|
||||
'_invm_cls': cls._fwdm_cls,
|
||||
})
|
||||
cls._inv_cls = inv_cls
|
||||
|
||||
@_t.overload
|
||||
def __init__(self, __arg: _t.Mapping[KT, VT], **kw: VT) -> None: ...
|
||||
@_t.overload
|
||||
def __init__(self, __arg: IterItems[KT, VT], **kw: VT) -> None: ...
|
||||
@_t.overload
|
||||
def __init__(self, **kw: VT) -> None: ...
|
||||
def __init__(self, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
|
||||
"""Make a new bidirectional dictionary.
|
||||
The signature behaves like that of :class:`dict`.
|
||||
Items passed in are added in the order they are passed,
|
||||
respecting the :attr:`on_dup` class attribute in the process.
|
||||
"""
|
||||
#: The backing :class:`~collections.abc.Mapping`
|
||||
#: storing the forward mapping data (*key* → *value*).
|
||||
self._fwdm: _t.Dict[KT, VT] = self._fwdm_cls()
|
||||
#: The backing :class:`~collections.abc.Mapping`
|
||||
#: storing the inverse mapping data (*value* → *key*).
|
||||
self._invm: _t.Dict[VT, KT] = self._invm_cls()
|
||||
self._init_inv()
|
||||
if args or kw:
|
||||
self._update(True, self.on_dup, *args, **kw)
|
||||
|
||||
def _init_inv(self) -> None:
|
||||
# Create the inverse bidict instance via __new__, bypassing its __init__ so that its
|
||||
# _fwdm and _invm can be assigned to this bidict's _invm and _fwdm. Store it in self._inv,
|
||||
# which holds a strong reference to a bidict's inverse, if one is available.
|
||||
self._inv = inv = self._inv_cls.__new__(self._inv_cls) # type: ignore
|
||||
inv._fwdm = self._invm
|
||||
inv._invm = self._fwdm
|
||||
# Only give the inverse a weak reference to this bidict to avoid creating a reference cycle,
|
||||
# stored in the _invweak attribute. See also the docs in
|
||||
# :ref:`addendum:Bidict Avoids Reference Cycles`
|
||||
inv._inv = None
|
||||
inv._invweak = ref(self)
|
||||
# Since this bidict has a strong reference to its inverse already, set its _invweak to None.
|
||||
self._invweak = None
|
||||
|
||||
@property
|
||||
def _isinv(self) -> bool:
|
||||
return self._inv is None
|
||||
|
||||
@property
|
||||
def inverse(self) -> 'BidictBase[VT, KT]':
|
||||
"""The inverse of this bidict."""
|
||||
# Resolve and return a strong reference to the inverse bidict.
|
||||
# One may be stored in self._inv already.
|
||||
if self._inv is not None:
|
||||
return self._inv # type: ignore
|
||||
# Otherwise a weakref is stored in self._invweak. Try to get a strong ref from it.
|
||||
assert self._invweak is not None
|
||||
inv = self._invweak()
|
||||
if inv is not None:
|
||||
return inv
|
||||
# Refcount of referent must have dropped to zero, as in `bidict().inv.inv`. Init a new one.
|
||||
self._init_inv() # Now this bidict will retain a strong ref to its inverse.
|
||||
return self._inv
|
||||
|
||||
#: Alias for :attr:`inverse`.
|
||||
inv = inverse
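# Editor's note (illustrative addition, not part of the bidict source):
# because a bidict and its inverse share the same two backing dicts (with
# roles swapped), a mutation made through either one is immediately visible
# through the other, e.g.:
#
#     >>> from bidict import bidict
#     >>> b = bidict({'H': 'hydrogen'})
#     >>> b.inverse['helium'] = 'He'
#     >>> b['He']
#     'helium'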
|
||||
|
||||
def __getstate__(self) -> dict:
|
||||
"""Needed to enable pickling due to use of :attr:`__slots__` and weakrefs.
|
||||
|
||||
*See also* :meth:`object.__getstate__`
|
||||
"""
|
||||
state = {}
|
||||
for cls in self.__class__.__mro__:
|
||||
slots = getattr(cls, '__slots__', ())
|
||||
for slot in slots:
|
||||
if hasattr(self, slot):
|
||||
state[slot] = getattr(self, slot)
|
||||
# weakrefs can't be pickled.
|
||||
state.pop('_invweak', None) # Added back in __setstate__ via _init_inv call.
|
||||
state.pop('__weakref__', None) # Not added back in __setstate__. Python manages this one.
|
||||
return state
|
||||
|
||||
def __setstate__(self, state: dict) -> None:
|
||||
"""Implemented because use of :attr:`__slots__` would prevent unpickling otherwise.
|
||||
|
||||
*See also* :meth:`object.__setstate__`
|
||||
"""
|
||||
for slot, value in state.items():
|
||||
setattr(self, slot, value)
|
||||
self._init_inv()
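# Editor's note (illustrative addition, not part of the bidict source): thanks
# to the __getstate__/__setstate__ pair above, bidicts round-trip through
# pickle despite using __slots__ and holding a weakref internally, e.g.:
#
#     >>> import pickle
#     >>> from bidict import bidict
#     >>> b = bidict({'H': 'hydrogen'})
#     >>> pickle.loads(pickle.dumps(b)) == b
#     True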
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""See :func:`repr`."""
|
||||
clsname = self.__class__.__name__
|
||||
if not self:
|
||||
return f'{clsname}()'
|
||||
return f'{clsname}({self._repr_delegate(self.items())})'
|
||||
|
||||
# The inherited Mapping.__eq__ implementation would work, but it's implemented in terms of an
|
||||
# inefficient ``dict(self.items()) == dict(other.items())`` comparison, so override it with a
|
||||
# more efficient implementation.
|
||||
def __eq__(self, other: object) -> bool:
|
||||
"""*x.__eq__(other) ⟺ x == other*
|
||||
|
||||
Equivalent to *dict(x.items()) == dict(other.items())*
|
||||
but more efficient.
|
||||
|
||||
Note that :meth:`bidict's __eq__() <bidict.bidict.__eq__>` implementation
|
||||
is inherited by subclasses,
|
||||
in particular by the ordered bidict subclasses,
|
||||
so even with ordered bidicts,
|
||||
:ref:`== comparison is order-insensitive <eq-order-insensitive>`.
|
||||
|
||||
*See also* :meth:`bidict.FrozenOrderedBidict.equals_order_sensitive`
|
||||
"""
|
||||
if not isinstance(other, _t.Mapping) or len(self) != len(other):
|
||||
return False
|
||||
selfget = self.get
|
||||
return all(selfget(k, _NONE) == v for (k, v) in other.items()) # type: ignore
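# Editor's note (illustrative addition, not part of the bidict source): as the
# docstring above notes, == stays order-insensitive (and works against plain
# mappings), even for the ordered bidict subclasses, e.g.:
#
#     >>> from bidict import OrderedBidict
#     >>> a = OrderedBidict({'H': 'hydrogen', 'He': 'helium'})
#     >>> b = OrderedBidict({'He': 'helium', 'H': 'hydrogen'})
#     >>> a == b
#     True
#     >>> a == {'H': 'hydrogen', 'He': 'helium'}
#     True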
|
||||
|
||||
# The following methods are mutating and so are not public. But they are implemented in this
|
||||
# non-mutable base class (rather than the mutable `bidict` subclass) because they are used here
|
||||
# during initialization (starting with the `_update` method). (Why is this? Because `__init__`
|
||||
# and `update` share a lot of the same behavior (inserting the provided items while respecting
|
||||
# `on_dup`), so it makes sense for them to share implementation too.)
|
||||
def _pop(self, key: KT) -> VT:
|
||||
val = self._fwdm.pop(key)
|
||||
del self._invm[val]
|
||||
return val
|
||||
|
||||
def _put(self, key: KT, val: VT, on_dup: OnDup) -> None:
|
||||
dedup_result = self._dedup_item(key, val, on_dup)
|
||||
if dedup_result is not None:
|
||||
self._write_item(key, val, dedup_result)
|
||||
|
||||
def _dedup_item(self, key: KT, val: VT, on_dup: OnDup) -> _t.Optional[_DedupResult]:
|
||||
"""Check *key* and *val* for any duplication in self.
|
||||
|
||||
Handle any duplication as per the passed in *on_dup*.
|
||||
|
||||
(key, val) already present is construed as a no-op, not a duplication.
|
||||
|
||||
If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
|
||||
:attr:`~bidict.DROP_NEW`, return None.
|
||||
|
||||
If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
|
||||
:attr:`~bidict.RAISE`, raise the appropriate error.
|
||||
|
||||
If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
|
||||
:attr:`~bidict.DROP_OLD`,
|
||||
or if no duplication is found,
|
||||
return the :class:`_DedupResult` *(isdupkey, isdupval, oldkey, oldval)*.
|
||||
"""
|
||||
fwdm = self._fwdm
|
||||
invm = self._invm
|
||||
oldval: OVT = fwdm.get(key, _NONE)
|
||||
oldkey: OKT = invm.get(val, _NONE)
|
||||
isdupkey = oldval is not _NONE
|
||||
isdupval = oldkey is not _NONE
|
||||
dedup_result = _DedupResult(isdupkey, isdupval, oldkey, oldval)
|
||||
if isdupkey and isdupval:
|
||||
if self._already_have(key, val, oldkey, oldval):
|
||||
# (key, val) duplicates an existing item -> no-op.
|
||||
return None
|
||||
# key and val each duplicate a different existing item.
|
||||
if on_dup.kv is RAISE:
|
||||
raise KeyAndValueDuplicationError(key, val)
|
||||
if on_dup.kv is DROP_NEW:
|
||||
return None
|
||||
assert on_dup.kv is DROP_OLD
|
||||
# Fall through to the return statement on the last line.
|
||||
elif isdupkey:
|
||||
if on_dup.key is RAISE:
|
||||
raise KeyDuplicationError(key)
|
||||
if on_dup.key is DROP_NEW:
|
||||
return None
|
||||
assert on_dup.key is DROP_OLD
|
||||
# Fall through to the return statement on the last line.
|
||||
elif isdupval:
|
||||
if on_dup.val is RAISE:
|
||||
raise ValueDuplicationError(val)
|
||||
if on_dup.val is DROP_NEW:
|
||||
return None
|
||||
assert on_dup.val is DROP_OLD
|
||||
# Fall through to the return statement on the last line.
|
||||
# else neither isdupkey nor isdupval.
|
||||
return dedup_result
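# Editor's note (illustrative addition, not part of the bidict source): this
# duplication policy is what users see through the public API, where a value
# collision raises by default and forceput() opts into overwriting, e.g.:
#
#     >>> from bidict import bidict, ValueDuplicationError
#     >>> b = bidict({'H': 'hydrogen'})
#     >>> try:
#     ...     b['Hy'] = 'hydrogen'         # 'hydrogen' is already bound to 'H'
#     ... except ValueDuplicationError:
#     ...     print('duplicate value')
#     duplicate value
#     >>> b.forceput('Hy', 'hydrogen')     # drop-old behavior: replaces ('H', 'hydrogen')
#     >>> b
#     bidict({'Hy': 'hydrogen'})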
|
||||
|
||||
@staticmethod
|
||||
def _already_have(key: KT, val: VT, oldkey: OKT, oldval: OVT) -> bool:
|
||||
# Overridden by _orderedbase.OrderedBidictBase.
|
||||
isdup = oldkey == key
|
||||
assert isdup == (oldval == val), f'{key} {val} {oldkey} {oldval}'
|
||||
return isdup
|
||||
|
||||
def _write_item(self, key: KT, val: VT, dedup_result: _DedupResult) -> _WriteResult:
|
||||
# Overridden by _orderedbase.OrderedBidictBase.
|
||||
isdupkey, isdupval, oldkey, oldval = dedup_result
|
||||
fwdm = self._fwdm
|
||||
invm = self._invm
|
||||
fwdm[key] = val
|
||||
invm[val] = key
|
||||
if isdupkey:
|
||||
del invm[oldval]
|
||||
if isdupval:
|
||||
del fwdm[oldkey]
|
||||
return _WriteResult(key, val, oldkey, oldval)
|
||||
|
||||
def _update(self, init: bool, on_dup: OnDup, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
|
||||
# args[0] may be a generator that yields many items, so process input in a single pass.
|
||||
if not args and not kw:
|
||||
return
|
||||
can_skip_dup_check = not self and not kw and isinstance(args[0], BidirectionalMapping)
|
||||
if can_skip_dup_check:
|
||||
self._update_no_dup_check(args[0]) # type: ignore
|
||||
return
|
||||
can_skip_rollback = init or RAISE not in on_dup
|
||||
if can_skip_rollback:
|
||||
self._update_no_rollback(on_dup, *args, **kw)
|
||||
else:
|
||||
self._update_with_rollback(on_dup, *args, **kw)
|
||||
|
||||
def _update_no_dup_check(self, other: BidirectionalMapping[KT, VT]) -> None:
|
||||
write_item = self._write_item
|
||||
for (key, val) in other.items():
|
||||
write_item(key, val, _NODUP)
|
||||
|
||||
def _update_no_rollback(self, on_dup: OnDup, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
|
||||
put = self._put
|
||||
for (key, val) in _iteritems_args_kw(*args, **kw):
|
||||
put(key, val, on_dup)
|
||||
|
||||
def _update_with_rollback(self, on_dup: OnDup, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
|
||||
"""Update, rolling back on failure."""
|
||||
writes: _t.List[_t.Tuple[_DedupResult, _WriteResult]] = []
|
||||
append_write = writes.append
|
||||
dedup_item = self._dedup_item
|
||||
write_item = self._write_item
|
||||
for (key, val) in _iteritems_args_kw(*args, **kw):
|
||||
try:
|
||||
dedup_result = dedup_item(key, val, on_dup)
|
||||
except DuplicationError:
|
||||
undo_write = self._undo_write
|
||||
for dedup_result, write_result in reversed(writes):
|
||||
undo_write(dedup_result, write_result)
|
||||
raise
|
||||
if dedup_result is not None:
|
||||
write_result = write_item(key, val, dedup_result)
|
||||
append_write((dedup_result, write_result))
|
||||
|
||||
def _undo_write(self, dedup_result: _DedupResult, write_result: _WriteResult) -> None:
|
||||
isdupkey, isdupval, _, _ = dedup_result
|
||||
key, val, oldkey, oldval = write_result
|
||||
if not isdupkey and not isdupval:
|
||||
self._pop(key)
|
||||
return
|
||||
fwdm = self._fwdm
|
||||
invm = self._invm
|
||||
if isdupkey:
|
||||
fwdm[key] = oldval
|
||||
invm[oldval] = key
|
||||
if not isdupval:
|
||||
del invm[val]
|
||||
if isdupval:
|
||||
invm[val] = oldkey
|
||||
fwdm[oldkey] = val
|
||||
if not isdupkey:
|
||||
del fwdm[key]
|
||||
|
||||
def copy(self: BT) -> BT:
|
||||
"""A shallow copy."""
|
||||
# Could just ``return self.__class__(self)`` here instead, but the below is faster. It uses
|
||||
# __new__ to create a copy instance while bypassing its __init__, which would result
|
||||
# in copying this bidict's items into the copy instance one at a time. Instead, make whole
|
||||
# copies of each of the backing mappings, and make them the backing mappings of the copy,
|
||||
# avoiding copying items one at a time.
|
||||
cp = self.__class__.__new__(self.__class__)
|
||||
cp._fwdm = copy(self._fwdm)
|
||||
cp._invm = copy(self._invm)
|
||||
cp._init_inv()
|
||||
return cp # type: ignore
|
||||
|
||||
#: Used for the copy protocol.
|
||||
#: *See also* the :mod:`copy` module
|
||||
__copy__ = copy
|
||||
|
||||
def __len__(self) -> int:
|
||||
"""The number of contained items."""
|
||||
return len(self._fwdm)
|
||||
|
||||
def __iter__(self) -> _t.Iterator[KT]:
|
||||
"""Iterator over the contained keys."""
|
||||
return iter(self._fwdm)
|
||||
|
||||
def __getitem__(self, key: KT) -> VT:
|
||||
"""*x.__getitem__(key) ⟺ x[key]*"""
|
||||
return self._fwdm[key]
|
||||
|
||||
|
||||
# Work around weakref slot with Generics bug on Python 3.6 (https://bugs.python.org/issue41451):
|
||||
BidictBase.__slots__.remove('__weakref__')
|
||||
|
||||
# * Code review nav *
|
||||
#==============================================================================
|
||||
# ← Prev: _abc.py Current: _base.py Next: _frozenbidict.py →
|
||||
#==============================================================================
|
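For reference, a minimal sketch of how the duplication checks above surface through the public API of the bidict 0.21.2 package vendored in this commit (illustrative values only, not part of the diff):

# illustrative only -- not part of the committed files
from bidict import bidict, ValueDuplicationError

b = bidict({'H': 'hydrogen'})
b['H'] = 'hydrogen'          # same (key, val) already present -> treated as a no-op
try:
    b['He'] = 'hydrogen'     # value duplication -> _dedup_item raises by default
except ValueDuplicationError:
    pass
b.forceput('He', 'hydrogen') # DROP_OLD policy: the old item ('H', 'hydrogen') is dropped
assert dict(b) == {'He': 'hydrogen'}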
51
matteo_env/Lib/site-packages/bidict/_bidict.py
Normal file
@@ -0,0 +1,51 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


#==============================================================================
# * Welcome to the bidict source code *
#==============================================================================

# Doing a code review? You'll find a "Code review nav" comment like the one
# below at the top and bottom of the most important source files. This provides
# a suggested initial path through the source when reviewing.
#
# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
# viewing an outdated version of the code. Please head to GitHub to review the
# latest version, which contains important improvements over older versions.
#
# Thank you for reading and for any feedback you provide.

# * Code review nav *
#==============================================================================
# ← Prev: _mut.py   Current: _bidict.py   Next: _orderedbase.py →
#==============================================================================


"""Provide :class:`bidict`."""

import typing as _t

from ._delegating import _DelegatingBidict
from ._mut import MutableBidict
from ._typing import KT, VT


class bidict(_DelegatingBidict[KT, VT], MutableBidict[KT, VT]):
    """Base class for mutable bidirectional mappings."""

    __slots__ = ()

    if _t.TYPE_CHECKING:
        @property
        def inverse(self) -> 'bidict[VT, KT]': ...


# * Code review nav *
#==============================================================================
# ← Prev: _mut.py   Current: _bidict.py   Next: _orderedbase.py →
#==============================================================================
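For reference, a minimal usage sketch of the `bidict` class added above (illustrative values only, not part of the diff):

# illustrative only -- not part of the committed files
from bidict import bidict

element_by_symbol = bidict(H='hydrogen')
assert element_by_symbol['H'] == 'hydrogen'
assert element_by_symbol.inverse['hydrogen'] == 'H'   # the inverse mapping stays in sync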
39
matteo_env/Lib/site-packages/bidict/_delegating.py
Normal file
@@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


"""Provide :class:`_DelegatingBidict`."""

import typing as _t

from ._base import BidictBase
from ._typing import KT, VT


class _DelegatingBidict(BidictBase[KT, VT]):
    """Provide optimized implementations of several methods by delegating to backing dicts.

    Used to override less efficient implementations inherited by :class:`~collections.abc.Mapping`.
    """

    __slots__ = ()

    def __iter__(self) -> _t.Iterator[KT]:
        """Iterator over the contained keys."""
        return iter(self._fwdm)

    def keys(self) -> _t.KeysView[KT]:
        """A set-like object providing a view on the contained keys."""
        return self._fwdm.keys()

    def values(self) -> _t.KeysView[VT]:  # type: ignore # https://github.com/python/typeshed/issues/4435
        """A set-like object providing a view on the contained values."""
        return self._invm.keys()

    def items(self) -> _t.ItemsView[KT, VT]:
        """A set-like object providing a view on the contained items."""
        return self._fwdm.items()
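For reference, a small sketch of what the delegation above buys: both views are backed directly by the underlying dicts, so even `values()` is set-like (illustrative values only, not part of the diff):

# illustrative only -- not part of the committed files
from bidict import bidict

b = bidict(one=1, two=2)
assert b.keys() & {'one'} == {'one'}   # keys() is _fwdm's own KeysView
assert b.values() & {2} == {2}         # values() is the inverse mapping's KeysView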
58
matteo_env/Lib/site-packages/bidict/_dup.py
Normal file
@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


"""Provide :class:`OnDup` and related functionality."""


from collections import namedtuple
from enum import Enum


class OnDupAction(Enum):
    """An action to take to prevent duplication from occurring."""

    #: Raise a :class:`~bidict.DuplicationError`.
    RAISE = 'RAISE'
    #: Overwrite existing items with new items.
    DROP_OLD = 'DROP_OLD'
    #: Keep existing items and drop new items.
    DROP_NEW = 'DROP_NEW'

    def __repr__(self) -> str:
        return f'<{self.name}>'


RAISE = OnDupAction.RAISE
DROP_OLD = OnDupAction.DROP_OLD
DROP_NEW = OnDupAction.DROP_NEW


class OnDup(namedtuple('_OnDup', 'key val kv')):
    r"""A 3-tuple of :class:`OnDupAction`\s specifying how to handle the 3 kinds of duplication.

    *See also* :ref:`basic-usage:Values Must Be Unique`

    If *kv* is not specified, *val* will be used for *kv*.
    """

    __slots__ = ()

    def __new__(cls, key: OnDupAction = DROP_OLD, val: OnDupAction = RAISE, kv: OnDupAction = RAISE) -> 'OnDup':
        """Override to provide user-friendly default values."""
        return super().__new__(cls, key, val, kv or val)


#: Default :class:`OnDup` used for the
#: :meth:`~bidict.bidict.__init__`,
#: :meth:`~bidict.bidict.__setitem__`, and
#: :meth:`~bidict.bidict.update` methods.
ON_DUP_DEFAULT = OnDup()
#: An :class:`OnDup` whose members are all :obj:`RAISE`.
ON_DUP_RAISE = OnDup(key=RAISE, val=RAISE, kv=RAISE)
#: An :class:`OnDup` whose members are all :obj:`DROP_OLD`.
ON_DUP_DROP_OLD = OnDup(key=DROP_OLD, val=DROP_OLD, kv=DROP_OLD)
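For reference, a minimal sketch of passing a custom `OnDup` policy to `put` (illustrative values only, not part of the diff):

# illustrative only -- not part of the committed files
from bidict import bidict, OnDup, DROP_OLD, RAISE

b = bidict({'H': 'hydrogen'})
# Allow overwriting an existing key, but still raise on value collisions.
b.put('H', 'hydrogen-1', OnDup(key=DROP_OLD, val=RAISE))
assert b['H'] == 'hydrogen-1'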
35
matteo_env/Lib/site-packages/bidict/_exc.py
Normal file
@@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


"""Provide all bidict exceptions."""


class BidictException(Exception):
    """Base class for bidict exceptions."""


class DuplicationError(BidictException):
    """Base class for exceptions raised when uniqueness is violated
    as per the :attr:~bidict.RAISE` :class:`~bidict.OnDupAction`.
    """


class KeyDuplicationError(DuplicationError):
    """Raised when a given key is not unique."""


class ValueDuplicationError(DuplicationError):
    """Raised when a given value is not unique."""


class KeyAndValueDuplicationError(KeyDuplicationError, ValueDuplicationError):
    """Raised when a given item's key and value are not unique.

    That is, its key duplicates that of another item,
    and its value duplicates that of a different other item.
    """
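For reference, a small sketch of how the exception hierarchy above is typically caught (illustrative values only, not part of the diff):

# illustrative only -- not part of the committed files
from bidict import bidict, DuplicationError, KeyAndValueDuplicationError

b = bidict({'H': 'hydrogen', 'He': 'helium'})
try:
    b.put('H', 'helium')   # key dups one item, value dups a different one
except KeyAndValueDuplicationError as exc:
    assert isinstance(exc, DuplicationError)   # the common base class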
58
matteo_env/Lib/site-packages/bidict/_frozenbidict.py
Normal file
@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


#==============================================================================
# * Welcome to the bidict source code *
#==============================================================================

# Doing a code review? You'll find a "Code review nav" comment like the one
# below at the top and bottom of the most important source files. This provides
# a suggested initial path through the source when reviewing.
#
# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
# viewing an outdated version of the code. Please head to GitHub to review the
# latest version, which contains important improvements over older versions.
#
# Thank you for reading and for any feedback you provide.

# * Code review nav *
#==============================================================================
# ← Prev: _base.py   Current: _frozenbidict.py   Next: _mut.py →
#==============================================================================

"""Provide :class:`frozenbidict`, an immutable, hashable bidirectional mapping type."""

import typing as _t

from ._delegating import _DelegatingBidict
from ._typing import KT, VT


class frozenbidict(_DelegatingBidict[KT, VT]):
    """Immutable, hashable bidict type."""

    __slots__ = ()

    # Work around lack of support for higher-kinded types in mypy.
    # Ref: https://github.com/python/typing/issues/548#issuecomment-621571821
    # Remove this and similar type stubs from other classes if support is ever added.
    if _t.TYPE_CHECKING:
        @property
        def inverse(self) -> 'frozenbidict[VT, KT]': ...

    def __hash__(self) -> int:
        """The hash of this bidict as determined by its items."""
        if getattr(self, '_hash', None) is None:
            self._hash = _t.ItemsView(self)._hash()  # type: ignore
        return self._hash  # type: ignore


# * Code review nav *
#==============================================================================
# ← Prev: _base.py   Current: _frozenbidict.py   Next: _mut.py →
#==============================================================================
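For reference, a minimal sketch of why `frozenbidict`'s `__hash__` matters: being immutable and hashable, it can be used as a dict key or set member (illustrative values only, not part of the diff):

# illustrative only -- not part of the committed files
from bidict import frozenbidict

f = frozenbidict({'H': 'hydrogen'})
cache = {f: 'hashable, so usable as a key'}   # would raise TypeError for a mutable bidict
assert f.inverse['hydrogen'] == 'H'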
75
matteo_env/Lib/site-packages/bidict/_frozenordered.py
Normal file
@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


#==============================================================================
# * Welcome to the bidict source code *
#==============================================================================

# Doing a code review? You'll find a "Code review nav" comment like the one
# below at the top and bottom of the most important source files. This provides
# a suggested initial path through the source when reviewing.
#
# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
# viewing an outdated version of the code. Please head to GitHub to review the
# latest version, which contains important improvements over older versions.
#
# Thank you for reading and for any feedback you provide.

# * Code review nav *
#==============================================================================
#← Prev: _orderedbase.py   Current: _frozenordered.py   Next: _orderedbidict.py →
#==============================================================================

"""Provide :class:`FrozenOrderedBidict`, an immutable, hashable, ordered bidict."""

import typing as _t

from ._frozenbidict import frozenbidict
from ._orderedbase import OrderedBidictBase
from ._typing import KT, VT


class FrozenOrderedBidict(OrderedBidictBase[KT, VT]):
    """Hashable, immutable, ordered bidict type."""

    __slots__ = ()
    __hash__ = frozenbidict.__hash__

    if _t.TYPE_CHECKING:
        @property
        def inverse(self) -> 'FrozenOrderedBidict[VT, KT]': ...

    # Assume the Python implementation's dict type is ordered (e.g. PyPy or CPython >= 3.6), so we
    # can delegate to `_fwdm` and `_invm` for faster implementations of several methods. Both
    # `_fwdm` and `_invm` will always be initialized with the provided items in the correct order,
    # and since `FrozenOrderedBidict` is immutable, their respective orders can't get out of sync
    # after a mutation.
    def __iter__(self) -> _t.Iterator[KT]:
        """Iterator over the contained keys in insertion order."""
        return self._iter()

    def _iter(self, *, reverse: bool = False) -> _t.Iterator[KT]:
        if reverse:
            return super()._iter(reverse=True)
        return iter(self._fwdm._fwdm)

    def keys(self) -> _t.KeysView[KT]:
        """A set-like object providing a view on the contained keys."""
        return self._fwdm._fwdm.keys()

    def values(self) -> _t.KeysView[VT]:  # type: ignore
        """A set-like object providing a view on the contained values."""
        return self._invm._fwdm.keys()

    # We can't delegate for items because values in `_fwdm` are nodes.


# * Code review nav *
#==============================================================================
#← Prev: _orderedbase.py   Current: _frozenordered.py   Next: _orderedbidict.py →
#==============================================================================
67
matteo_env/Lib/site-packages/bidict/_iter.py
Normal file
@@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


"""Functions for iterating over items in a mapping."""

import typing as _t
from collections.abc import Mapping
from itertools import chain, repeat

from ._typing import KT, VT, IterItems, MapOrIterItems


_NULL_IT = repeat(None, 0)  # repeat 0 times -> raise StopIteration from the start


def _iteritems_mapping_or_iterable(arg: MapOrIterItems[KT, VT]) -> IterItems[KT, VT]:
    """Yield the items in *arg*.

    If *arg* is a :class:`~collections.abc.Mapping`, return an iterator over its items.
    Otherwise return an iterator over *arg* itself.
    """
    return iter(arg.items() if isinstance(arg, Mapping) else arg)


def _iteritems_args_kw(*args: MapOrIterItems[KT, VT], **kw: VT) -> IterItems[KT, VT]:
    """Yield the items from the positional argument (if given) and then any from *kw*.

    :raises TypeError: if more than one positional argument is given.
    """
    args_len = len(args)
    if args_len > 1:
        raise TypeError(f'Expected at most 1 positional argument, got {args_len}')
    itemchain = None
    if args:
        arg = args[0]
        if arg:
            itemchain = _iteritems_mapping_or_iterable(arg)
    if kw:
        iterkw = iter(kw.items())
        itemchain = chain(itemchain, iterkw) if itemchain else iterkw  # type: ignore
    return itemchain or _NULL_IT  # type: ignore


@_t.overload
def inverted(arg: _t.Mapping[KT, VT]) -> IterItems[VT, KT]: ...
@_t.overload
def inverted(arg: IterItems[KT, VT]) -> IterItems[VT, KT]: ...
def inverted(arg: MapOrIterItems[KT, VT]) -> IterItems[VT, KT]:
    """Yield the inverse items of the provided object.

    If *arg* has a :func:`callable` ``__inverted__`` attribute,
    return the result of calling it.

    Otherwise, return an iterator over the items in `arg`,
    inverting each item on the fly.

    *See also* :attr:`bidict.BidirectionalMapping.__inverted__`
    """
    inv = getattr(arg, '__inverted__', None)
    if callable(inv):
        return inv()  # type: ignore
    return ((val, key) for (key, val) in _iteritems_mapping_or_iterable(arg))
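For reference, a minimal sketch of the `inverted` helper defined above (illustrative values only, not part of the diff):

# illustrative only -- not part of the committed files
from bidict import bidict, inverted

assert dict(inverted({'one': 1, 'two': 2})) == {1: 'one', 2: 'two'}   # plain mapping: items flipped on the fly
assert dict(inverted(bidict(one=1))) == {1: 'one'}                    # bidict: delegates to __inverted__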
188
matteo_env/Lib/site-packages/bidict/_mut.py
Normal file
@@ -0,0 +1,188 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


#==============================================================================
# * Welcome to the bidict source code *
#==============================================================================

# Doing a code review? You'll find a "Code review nav" comment like the one
# below at the top and bottom of the most important source files. This provides
# a suggested initial path through the source when reviewing.
#
# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
# viewing an outdated version of the code. Please head to GitHub to review the
# latest version, which contains important improvements over older versions.
#
# Thank you for reading and for any feedback you provide.

# * Code review nav *
#==============================================================================
# ← Prev: _frozenbidict.py   Current: _mut.py   Next: _bidict.py →
#==============================================================================


"""Provide :class:`MutableBidict`."""

import typing as _t

from ._abc import MutableBidirectionalMapping
from ._base import BidictBase
from ._dup import OnDup, ON_DUP_RAISE, ON_DUP_DROP_OLD
from ._typing import _NONE, KT, VT, VDT, IterItems, MapOrIterItems


class MutableBidict(BidictBase[KT, VT], MutableBidirectionalMapping[KT, VT]):
    """Base class for mutable bidirectional mappings."""

    __slots__ = ()

    if _t.TYPE_CHECKING:
        @property
        def inverse(self) -> 'MutableBidict[VT, KT]': ...

    def __delitem__(self, key: KT) -> None:
        """*x.__delitem__(y) ⟺ del x[y]*"""
        self._pop(key)

    def __setitem__(self, key: KT, val: VT) -> None:
        """Set the value for *key* to *val*.

        If *key* is already associated with *val*, this is a no-op.

        If *key* is already associated with a different value,
        the old value will be replaced with *val*,
        as with dict's :meth:`__setitem__`.

        If *val* is already associated with a different key,
        an exception is raised
        to protect against accidental removal of the key
        that's currently associated with *val*.

        Use :meth:`put` instead if you want to specify different behavior in
        the case that the provided key or value duplicates an existing one.
        Or use :meth:`forceput` to unconditionally associate *key* with *val*,
        replacing any existing items as necessary to preserve uniqueness.

        :raises bidict.ValueDuplicationError: if *val* duplicates that of an
            existing item.

        :raises bidict.KeyAndValueDuplicationError: if *key* duplicates the key of an
            existing item and *val* duplicates the value of a different
            existing item.
        """
        self._put(key, val, self.on_dup)

    def put(self, key: KT, val: VT, on_dup: OnDup = ON_DUP_RAISE) -> None:
        """Associate *key* with *val*, honoring the :class:`OnDup` given in *on_dup*.

        For example, if *on_dup* is :attr:`~bidict.ON_DUP_RAISE`,
        then *key* will be associated with *val* if and only if
        *key* is not already associated with an existing value and
        *val* is not already associated with an existing key,
        otherwise an exception will be raised.

        If *key* is already associated with *val*, this is a no-op.

        :raises bidict.KeyDuplicationError: if attempting to insert an item
            whose key only duplicates an existing item's, and *on_dup.key* is
            :attr:`~bidict.RAISE`.

        :raises bidict.ValueDuplicationError: if attempting to insert an item
            whose value only duplicates an existing item's, and *on_dup.val* is
            :attr:`~bidict.RAISE`.

        :raises bidict.KeyAndValueDuplicationError: if attempting to insert an
            item whose key duplicates one existing item's, and whose value
            duplicates another existing item's, and *on_dup.kv* is
            :attr:`~bidict.RAISE`.
        """
        self._put(key, val, on_dup)

    def forceput(self, key: KT, val: VT) -> None:
        """Associate *key* with *val* unconditionally.

        Replace any existing mappings containing key *key* or value *val*
        as necessary to preserve uniqueness.
        """
        self._put(key, val, ON_DUP_DROP_OLD)

    def clear(self) -> None:
        """Remove all items."""
        self._fwdm.clear()
        self._invm.clear()

    @_t.overload
    def pop(self, key: KT) -> VT: ...
    @_t.overload
    def pop(self, key: KT, default: VDT = ...) -> VDT: ...
    def pop(self, key: KT, default: VDT = _NONE) -> VDT:
        """*x.pop(k[, d]) → v*

        Remove specified key and return the corresponding value.

        :raises KeyError: if *key* is not found and no *default* is provided.
        """
        try:
            return self._pop(key)
        except KeyError:
            if default is _NONE:
                raise
            return default

    def popitem(self) -> _t.Tuple[KT, VT]:
        """*x.popitem() → (k, v)*

        Remove and return some item as a (key, value) pair.

        :raises KeyError: if *x* is empty.
        """
        if not self:
            raise KeyError('mapping is empty')
        key, val = self._fwdm.popitem()
        del self._invm[val]
        return key, val

    @_t.overload
    def update(self, __arg: _t.Mapping[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def update(self, __arg: IterItems[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def update(self, **kw: VT) -> None: ...
    def update(self, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
        """Like calling :meth:`putall` with *self.on_dup* passed for *on_dup*."""
        if args or kw:
            self._update(False, self.on_dup, *args, **kw)

    @_t.overload
    def forceupdate(self, __arg: _t.Mapping[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def forceupdate(self, __arg: IterItems[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def forceupdate(self, **kw: VT) -> None: ...
    def forceupdate(self, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
        """Like a bulk :meth:`forceput`."""
        self._update(False, ON_DUP_DROP_OLD, *args, **kw)

    @_t.overload
    def putall(self, items: _t.Mapping[KT, VT], on_dup: OnDup) -> None: ...
    @_t.overload
    def putall(self, items: IterItems[KT, VT], on_dup: OnDup = ON_DUP_RAISE) -> None: ...
    def putall(self, items: MapOrIterItems[KT, VT], on_dup: OnDup = ON_DUP_RAISE) -> None:
        """Like a bulk :meth:`put`.

        If one of the given items causes an exception to be raised,
        none of the items is inserted.
        """
        if items:
            self._update(False, on_dup, items)


# * Code review nav *
#==============================================================================
# ← Prev: _frozenbidict.py   Current: _mut.py   Next: _bidict.py →
#==============================================================================
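For reference, a minimal sketch of the mutating methods defined above, including the rollback behavior of `putall` (illustrative values only, not part of the diff):

# illustrative only -- not part of the committed files
from bidict import bidict, ON_DUP_RAISE, ValueDuplicationError

b = bidict(one=1)
b.update(two=2)              # like __setitem__: raises on value collisions
b.forceupdate({'one': 3})    # bulk forceput: overwrites unconditionally
try:
    b.putall({'three': 3}, on_dup=ON_DUP_RAISE)   # 3 already taken -> nothing is inserted
except ValueDuplicationError:
    pass
assert dict(b) == {'one': 3, 'two': 2}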
99
matteo_env/Lib/site-packages/bidict/_named.py
Normal file
@@ -0,0 +1,99 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""Provide :func:`bidict.namedbidict`."""

import typing as _t
from sys import _getframe

from ._abc import BidirectionalMapping, KT, VT
from ._bidict import bidict


def namedbidict(
    typename: str,
    keyname: str,
    valname: str,
    *,
    base_type: _t.Type[BidirectionalMapping[KT, VT]] = bidict,
) -> _t.Type[BidirectionalMapping[KT, VT]]:
    r"""Create a new subclass of *base_type* with custom accessors.

    Like :func:`collections.namedtuple` for bidicts.

    The new class's ``__name__`` and ``__qualname__`` will be set to *typename*,
    and its ``__module__`` will be set to the caller's module.

    Instances of the new class will provide access to their
    :attr:`inverse <BidirectionalMapping.inverse>` instances
    via the custom *keyname*\_for property,
    and access to themselves
    via the custom *valname*\_for property.

    *See also* the :ref:`namedbidict usage documentation
    <other-bidict-types:\:func\:\`~bidict.namedbidict\`>`

    :raises ValueError: if any of the *typename*, *keyname*, or *valname*
        strings is not a valid Python identifier, or if *keyname == valname*.

    :raises TypeError: if *base_type* is not a :class:`BidirectionalMapping` subclass
        that provides ``_isinv`` and :meth:`~object.__getstate__` attributes.
        (Any :class:`~bidict.BidictBase` subclass can be passed in, including all the
        concrete bidict types pictured in the :ref:`other-bidict-types:Bidict Types Diagram`.
    """
    if not issubclass(base_type, BidirectionalMapping) or not all(hasattr(base_type, i) for i in ('_isinv', '__getstate__')):
        raise TypeError(base_type)
    names = (typename, keyname, valname)
    if not all(map(str.isidentifier, names)) or keyname == valname:
        raise ValueError(names)

    class _Named(base_type):  # type: ignore

        __slots__ = ()

        def _getfwd(self) -> '_Named':
            return self.inverse if self._isinv else self  # type: ignore

        def _getinv(self) -> '_Named':
            return self if self._isinv else self.inverse  # type: ignore

        @property
        def _keyname(self) -> str:
            return valname if self._isinv else keyname

        @property
        def _valname(self) -> str:
            return keyname if self._isinv else valname

        def __reduce__(self) -> '_t.Tuple[_t.Callable[[str, str, str, _t.Type[BidirectionalMapping]], BidirectionalMapping], _t.Tuple[str, str, str, _t.Type[BidirectionalMapping]], dict]':
            return (_make_empty, (typename, keyname, valname, base_type), self.__getstate__())

    bname = base_type.__name__
    fname = valname + '_for'
    iname = keyname + '_for'
    fdoc = f'{typename} forward {bname}: {keyname} → {valname}'
    idoc = f'{typename} inverse {bname}: {valname} → {keyname}'
    setattr(_Named, fname, property(_Named._getfwd, doc=fdoc))
    setattr(_Named, iname, property(_Named._getinv, doc=idoc))

    _Named.__name__ = typename
    _Named.__qualname__ = typename
    _Named.__module__ = _getframe(1).f_globals.get('__name__')  # type: ignore
    return _Named


def _make_empty(
    typename: str,
    keyname: str,
    valname: str,
    base_type: _t.Type[BidirectionalMapping] = bidict,
) -> BidirectionalMapping:
    """Create a named bidict with the indicated arguments and return an empty instance.
    Used to make :func:`bidict.namedbidict` instances picklable.
    """
    cls = namedbidict(typename, keyname, valname, base_type=base_type)
    return cls()
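For reference, a minimal sketch of the `namedbidict` factory defined above (illustrative values only, not part of the diff):

# illustrative only -- not part of the committed files
from bidict import namedbidict

ElementMap = namedbidict('ElementMap', 'symbol', 'name')
noble = ElementMap(He='helium')
assert noble.name_for['He'] == 'helium'     # the generated valname_for accessor
assert noble.symbol_for['helium'] == 'He'   # the generated keyname_for accessor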
314
matteo_env/Lib/site-packages/bidict/_orderedbase.py
Normal file
@@ -0,0 +1,314 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


#==============================================================================
# * Welcome to the bidict source code *
#==============================================================================

# Doing a code review? You'll find a "Code review nav" comment like the one
# below at the top and bottom of the most important source files. This provides
# a suggested initial path through the source when reviewing.
#
# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
# viewing an outdated version of the code. Please head to GitHub to review the
# latest version, which contains important improvements over older versions.
#
# Thank you for reading and for any feedback you provide.

# * Code review nav *
#==============================================================================
# ← Prev: _bidict.py   Current: _orderedbase.py   Next: _frozenordered.py →
#==============================================================================


"""Provide :class:`OrderedBidictBase`."""

import typing as _t
from copy import copy
from weakref import ref

from ._base import _NONE, _DedupResult, _WriteResult, BidictBase, BT
from ._bidict import bidict
from ._typing import KT, VT, IterItems, MapOrIterItems


class _Node:
    """A node in a circular doubly-linked list
    used to encode the order of items in an ordered bidict.

    Only weak references to the next and previous nodes
    are held to avoid creating strong reference cycles.

    Because an ordered bidict retains two strong references
    to each node instance (one from its backing `_fwdm` mapping
    and one from its `_invm` mapping), a node's refcount will not
    drop to zero (and so will not be garbage collected) as long as
    the ordered bidict that contains it is still alive.
    Because nodes don't have strong reference cycles,
    once their containing bidict is freed,
    they too are immediately freed.
    """

    __slots__ = ('_prv', '_nxt', '__weakref__')

    def __init__(self, prv: '_Node' = None, nxt: '_Node' = None) -> None:
        self._setprv(prv)
        self._setnxt(nxt)

    def __repr__(self) -> str:
        clsname = self.__class__.__name__
        prv = id(self.prv)
        nxt = id(self.nxt)
        return f'{clsname}(prv={prv}, self={id(self)}, nxt={nxt})'

    def _getprv(self) -> '_t.Optional[_Node]':
        return self._prv() if isinstance(self._prv, ref) else self._prv

    def _setprv(self, prv: '_t.Optional[_Node]') -> None:
        self._prv = prv and ref(prv)

    prv = property(_getprv, _setprv)

    def _getnxt(self) -> '_t.Optional[_Node]':
        return self._nxt() if isinstance(self._nxt, ref) else self._nxt

    def _setnxt(self, nxt: '_t.Optional[_Node]') -> None:
        self._nxt = nxt and ref(nxt)

    nxt = property(_getnxt, _setnxt)

    def __getstate__(self) -> dict:
        """Return the instance state dictionary
        but with weakrefs converted to strong refs
        so that it can be pickled.

        *See also* :meth:`object.__getstate__`
        """
        return dict(_prv=self.prv, _nxt=self.nxt)

    def __setstate__(self, state: dict) -> None:
        """Set the instance state from *state*."""
        self._setprv(state['_prv'])
        self._setnxt(state['_nxt'])


class _SentinelNode(_Node):
    """Special node in a circular doubly-linked list
    that links the first node with the last node.
    When its next and previous references point back to itself
    it represents an empty list.
    """

    __slots__ = ()

    def __init__(self, prv: _Node = None, nxt: _Node = None) -> None:
        super().__init__(prv or self, nxt or self)

    def __repr__(self) -> str:
        return '<SNTL>'

    def __bool__(self) -> bool:
        return False

    def _iter(self, *, reverse: bool = False) -> _t.Iterator[_Node]:
        """Iterator yielding nodes in the requested order,
        i.e. traverse the linked list via :attr:`nxt`
        (or :attr:`prv` if *reverse* is truthy)
        until reaching a falsy (i.e. sentinel) node.
        """
        attr = 'prv' if reverse else 'nxt'
        node = getattr(self, attr)
        while node:
            yield node
            node = getattr(node, attr)


class OrderedBidictBase(BidictBase[KT, VT]):
    """Base class implementing an ordered :class:`BidirectionalMapping`."""

    __slots__ = ('_sntl',)

    _fwdm_cls = bidict  # type: ignore
    _invm_cls = bidict  # type: ignore

    #: The object used by :meth:`__repr__` for printing the contained items.
    _repr_delegate = list  # type: ignore

    @_t.overload
    def __init__(self, __arg: _t.Mapping[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def __init__(self, __arg: IterItems[KT, VT], **kw: VT) -> None: ...
    @_t.overload
    def __init__(self, **kw: VT) -> None: ...
    def __init__(self, *args: MapOrIterItems[KT, VT], **kw: VT) -> None:
        """Make a new ordered bidirectional mapping.
        The signature behaves like that of :class:`dict`.
        Items passed in are added in the order they are passed,
        respecting the :attr:`on_dup` class attribute in the process.

        The order in which items are inserted is remembered,
        similar to :class:`collections.OrderedDict`.
        """
        self._sntl = _SentinelNode()

        # Like unordered bidicts, ordered bidicts also store two backing one-directional mappings
        # `_fwdm` and `_invm`. But rather than mapping `key` to `val` and `val` to `key`
        # (respectively), they map `key` to `nodefwd` and `val` to `nodeinv` (respectively), where
        # `nodefwd` is `nodeinv` when `key` and `val` are associated with one another.

        # To effect this difference, `_write_item` and `_undo_write` are overridden. But much of the
        # rest of BidictBase's implementation, including BidictBase.__init__ and BidictBase._update,
        # are inherited and are able to be reused without modification.
        super().__init__(*args, **kw)

    if _t.TYPE_CHECKING:
        @property
        def inverse(self) -> 'OrderedBidictBase[VT, KT]': ...
        _fwdm: bidict[KT, _Node]  # type: ignore
        _invm: bidict[VT, _Node]  # type: ignore

    def _init_inv(self) -> None:
        super()._init_inv()
        self.inverse._sntl = self._sntl

    # Can't reuse BidictBase.copy since ordered bidicts have different internal structure.
    def copy(self: BT) -> BT:
        """A shallow copy of this ordered bidict."""
        # Fast copy implementation bypassing __init__. See comments in :meth:`BidictBase.copy`.
        cp = self.__class__.__new__(self.__class__)
        sntl = _SentinelNode()
        fwdm = copy(self._fwdm)
        invm = copy(self._invm)
        cur = sntl
        nxt = sntl.nxt
        for (key, val) in self.items():
            nxt = _Node(cur, sntl)
            cur.nxt = fwdm[key] = invm[val] = nxt
            cur = nxt
        sntl.prv = nxt
        cp._sntl = sntl
        cp._fwdm = fwdm
        cp._invm = invm
        cp._init_inv()
        return cp  # type: ignore

    __copy__ = copy

    def __getitem__(self, key: KT) -> VT:
        nodefwd = self._fwdm[key]
        val = self._invm.inverse[nodefwd]
        return val

    def _pop(self, key: KT) -> VT:
        nodefwd = self._fwdm.pop(key)
        val = self._invm.inverse.pop(nodefwd)
        nodefwd.prv.nxt = nodefwd.nxt
        nodefwd.nxt.prv = nodefwd.prv
        return val

    @staticmethod
    def _already_have(key: KT, val: VT, nodeinv: _Node, nodefwd: _Node) -> bool:  # type: ignore
        # Overrides _base.BidictBase.
        return nodeinv is nodefwd

    def _write_item(self, key: KT, val: VT, dedup_result: _DedupResult) -> _WriteResult:
        # Overrides _base.BidictBase.
        fwdm = self._fwdm  # bidict mapping keys to nodes
        invm = self._invm  # bidict mapping vals to nodes
        isdupkey, isdupval, nodeinv, nodefwd = dedup_result
        if not isdupkey and not isdupval:
            # No key or value duplication -> create and append a new node.
            sntl = self._sntl
            last = sntl.prv
            node = _Node(last, sntl)
            last.nxt = sntl.prv = fwdm[key] = invm[val] = node
            oldkey = oldval = _NONE
        elif isdupkey and isdupval:
            # Key and value duplication across two different nodes.
            assert nodefwd is not nodeinv
            oldval = invm.inverse[nodefwd]  # type: ignore
            oldkey = fwdm.inverse[nodeinv]  # type: ignore
            assert oldkey != key
            assert oldval != val
            # We have to collapse nodefwd and nodeinv into a single node, i.e. drop one of them.
            # Drop nodeinv, so that the item with the same key is the one overwritten in place.
            nodeinv.prv.nxt = nodeinv.nxt
            nodeinv.nxt.prv = nodeinv.prv
            # Don't remove nodeinv's references to its neighbors since
            # if the update fails, we'll need them to undo this write.
            # Update fwdm and invm.
            tmp = fwdm.pop(oldkey)  # type: ignore
            assert tmp is nodeinv
            tmp = invm.pop(oldval)  # type: ignore
            assert tmp is nodefwd
            fwdm[key] = invm[val] = nodefwd
        elif isdupkey:
            oldval = invm.inverse[nodefwd]  # type: ignore
            oldkey = _NONE
            oldnodeinv = invm.pop(oldval)  # type: ignore
            assert oldnodeinv is nodefwd
            invm[val] = nodefwd
        else:  # isdupval
            oldkey = fwdm.inverse[nodeinv]  # type: ignore
            oldval = _NONE
            oldnodefwd = fwdm.pop(oldkey)  # type: ignore
            assert oldnodefwd is nodeinv
            fwdm[key] = nodeinv
        return _WriteResult(key, val, oldkey, oldval)

    def _undo_write(self, dedup_result: _DedupResult, write_result: _WriteResult) -> None:
        fwdm = self._fwdm
        invm = self._invm
        isdupkey, isdupval, nodeinv, nodefwd = dedup_result
        key, val, oldkey, oldval = write_result
        if not isdupkey and not isdupval:
            self._pop(key)
        elif isdupkey and isdupval:
            # Restore original items.
            nodeinv.prv.nxt = nodeinv.nxt.prv = nodeinv
            fwdm[oldkey] = invm[val] = nodeinv
            invm[oldval] = fwdm[key] = nodefwd
        elif isdupkey:
            tmp = invm.pop(val)
            assert tmp is nodefwd
            invm[oldval] = nodefwd
            assert fwdm[key] is nodefwd
        else:  # isdupval
            tmp = fwdm.pop(key)
            assert tmp is nodeinv
            fwdm[oldkey] = nodeinv
            assert invm[val] is nodeinv

    def __iter__(self) -> _t.Iterator[KT]:
        """Iterator over the contained keys in insertion order."""
        return self._iter()

    def _iter(self, *, reverse: bool = False) -> _t.Iterator[KT]:
        fwdm_inv = self._fwdm.inverse
        for node in self._sntl._iter(reverse=reverse):
            yield fwdm_inv[node]

    def __reversed__(self) -> _t.Iterator[KT]:
        """Iterator over the contained keys in reverse insertion order."""
        yield from self._iter(reverse=True)

    def equals_order_sensitive(self, other: object) -> bool:
        """Order-sensitive equality check.

        *See also* :ref:`eq-order-insensitive`
        """
        # Same short-circuit as BidictBase.__eq__. Factoring out not worth function call overhead.
        if not isinstance(other, _t.Mapping) or len(self) != len(other):
            return False
        return all(i == j for (i, j) in zip(self.items(), other.items()))


# * Code review nav *
#==============================================================================
# ← Prev: _bidict.py   Current: _orderedbase.py   Next: _frozenordered.py →
#==============================================================================
93
matteo_env/Lib/site-packages/bidict/_orderedbidict.py
Normal file
@@ -0,0 +1,93 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


#==============================================================================
# * Welcome to the bidict source code *
#==============================================================================

# Doing a code review? You'll find a "Code review nav" comment like the one
# below at the top and bottom of the most important source files. This provides
# a suggested initial path through the source when reviewing.
#
# Note: If you aren't reading this on https://github.com/jab/bidict, you may be
# viewing an outdated version of the code. Please head to GitHub to review the
# latest version, which contains important improvements over older versions.
#
# Thank you for reading and for any feedback you provide.

# * Code review nav *
#==============================================================================
# ← Prev: _frozenordered.py   Current: _orderedbidict.py   <FIN>
#==============================================================================


"""Provide :class:`OrderedBidict`."""

import typing as _t

from ._mut import MutableBidict
from ._orderedbase import OrderedBidictBase
from ._typing import KT, VT


class OrderedBidict(OrderedBidictBase[KT, VT], MutableBidict[KT, VT]):
    """Mutable bidict type that maintains items in insertion order."""

    __slots__ = ()

    if _t.TYPE_CHECKING:
        @property
        def inverse(self) -> 'OrderedBidict[VT, KT]': ...

    def clear(self) -> None:
        """Remove all items."""
        self._fwdm.clear()
        self._invm.clear()
        self._sntl.nxt = self._sntl.prv = self._sntl

    def popitem(self, last: bool = True) -> _t.Tuple[KT, VT]:
        """*x.popitem() → (k, v)*

        Remove and return the most recently added item as a (key, value) pair
        if *last* is True, else the least recently added item.

        :raises KeyError: if *x* is empty.
        """
        if not self:
            raise KeyError('mapping is empty')
        key = next((reversed if last else iter)(self))  # type: ignore
        val = self._pop(key)
        return key, val

    def move_to_end(self, key: KT, last: bool = True) -> None:
        """Move an existing key to the beginning or end of this ordered bidict.

        The item is moved to the end if *last* is True, else to the beginning.

        :raises KeyError: if the key does not exist
        """
        node = self._fwdm[key]
        node.prv.nxt = node.nxt
        node.nxt.prv = node.prv
        sntl = self._sntl
        if last:
            lastnode = sntl.prv
            node.prv = lastnode
            node.nxt = sntl
            sntl.prv = lastnode.nxt = node
        else:
            firstnode = sntl.nxt
            node.prv = sntl
            node.nxt = firstnode
            sntl.nxt = firstnode.prv = node


# * Code review nav *
#==============================================================================
# ← Prev: _frozenordered.py   Current: _orderedbidict.py   <FIN>
#==============================================================================
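For reference, a minimal sketch of the ordered operations defined above (illustrative values only, not part of the diff):

# illustrative only -- not part of the committed files
from bidict import OrderedBidict

ob = OrderedBidict([('one', 1), ('two', 2), ('three', 3)])
ob.move_to_end('one')                          # relinks the node at the end of the sentinel list
assert list(ob) == ['two', 'three', 'one']
assert ob.popitem(last=False) == ('two', 2)    # pop from the front instead of the back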
33
matteo_env/Lib/site-packages/bidict/_typing.py
Normal file
@@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


"""Provide typing-related objects."""

import typing as _t


KT = _t.TypeVar('KT')
VT = _t.TypeVar('VT')
IterItems = _t.Iterable[_t.Tuple[KT, VT]]
MapOrIterItems = _t.Union[_t.Mapping[KT, VT], IterItems[KT, VT]]

DT = _t.TypeVar('DT')  #: for default arguments
VDT = _t.Union[VT, DT]


class _BareReprMeta(type):
    def __repr__(cls) -> str:
        return f'<{cls.__name__}>'


class _NONE(metaclass=_BareReprMeta):
    """Sentinel type used to represent 'missing'."""


OKT = _t.Union[KT, _NONE]  #: optional key type
OVT = _t.Union[VT, _NONE]  #: optional value type
4
matteo_env/Lib/site-packages/bidict/_version.py
Normal file
@@ -0,0 +1,4 @@
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
version = '0.21.2'
49
matteo_env/Lib/site-packages/bidict/metadata.py
Normal file
@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*-
# Copyright 2009-2020 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""Define bidict package metadata."""


# _version.py is generated by setuptools_scm (via its `write_to` param, see setup.py)
try:
    from ._version import version
except (ImportError, ValueError, SystemError):  # pragma: no cover
    try:
        import pkg_resources
    except ImportError:
        __version__ = '0.0.0.VERSION_NOT_FOUND'
    else:
        try:
            __version__ = pkg_resources.get_distribution('bidict').version
        except pkg_resources.DistributionNotFound:
            __version__ = '0.0.0.VERSION_NOT_FOUND'
else:  # pragma: no cover
    __version__ = version

try:
    __version_info__ = tuple(int(p) if i < 3 else p for (i, p) in enumerate(__version__.split('.')))
except Exception:  # pragma: no cover
    __vesion_info__ = (0, 0, 0, f'PARSE FAILURE: __version__={__version__!r}')

__author__ = 'Joshua Bronson'
__maintainer__ = 'Joshua Bronson'
__copyright__ = 'Copyright 2009-2020 Joshua Bronson'
__email__ = 'jabronson@gmail.com'

# See: ../docs/thanks.rst
__credits__ = [i.strip() for i in """
Joshua Bronson, Michael Arntzenius, Francis Carr, Gregory Ewing, Raymond Hettinger, Jozef Knaperek,
Daniel Pope, Terry Reedy, David Turner, Tom Viner, Richard Sanger, Zeyi Wang
""".split(',')]

__description__ = 'The bidirectional mapping library for Python.'
__keywords__ = 'dict dictionary mapping datastructure bimap bijection bijective ' \
    'injective inverse reverse bidirectional two-way 2-way'

__license__ = 'MPL 2.0'
__status__ = 'Beta'
__url__ = 'https://bidict.readthedocs.io'
0
matteo_env/Lib/site-packages/bidict/py.typed
Normal file
1
matteo_env/Lib/site-packages/click-7.1.2.dist-info/INSTALLER
Normal file
@@ -0,0 +1 @@
pip
28
matteo_env/Lib/site-packages/click-7.1.2.dist-info/LICENSE.rst
Normal file
@@ -0,0 +1,28 @@
Copyright 2014 Pallets

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

1.  Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.

2.  Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.

3.  Neither the name of the copyright holder nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
102
matteo_env/Lib/site-packages/click-7.1.2.dist-info/METADATA
Normal file
@@ -0,0 +1,102 @@
Metadata-Version: 2.1
Name: click
Version: 7.1.2
Summary: Composable command line interface toolkit
Home-page: https://palletsprojects.com/p/click/
Maintainer: Pallets
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Documentation, https://click.palletsprojects.com/
Project-URL: Code, https://github.com/pallets/click
Project-URL: Issue tracker, https://github.com/pallets/click/issues
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 3
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*

\$ click\_
==========

Click is a Python package for creating beautiful command line interfaces
in a composable way with as little code as necessary. It's the "Command
Line Interface Creation Kit". It's highly configurable but comes with
sensible defaults out of the box.

It aims to make the process of writing command line tools quick and fun
while also preventing any frustration caused by the inability to
implement an intended CLI API.

Click in three points:

-   Arbitrary nesting of commands
-   Automatic help page generation
-   Supports lazy loading of subcommands at runtime


Installing
----------

Install and update using `pip`_:

.. code-block:: text

    $ pip install -U click

.. _pip: https://pip.pypa.io/en/stable/quickstart/


A Simple Example
----------------

.. code-block:: python

    import click

    @click.command()
    @click.option("--count", default=1, help="Number of greetings.")
    @click.option("--name", prompt="Your name", help="The person to greet.")
    def hello(count, name):
        """Simple program that greets NAME for a total of COUNT times."""
        for _ in range(count):
            click.echo(f"Hello, {name}!")

    if __name__ == '__main__':
        hello()

.. code-block:: text

    $ python hello.py --count=3
    Your name: Click
    Hello, Click!
    Hello, Click!
    Hello, Click!


Donate
------

The Pallets organization develops and supports Click and other popular
packages. In order to grow the community of contributors and users, and
allow the maintainers to devote more time to the projects, `please
donate today`_.

.. _please donate today: https://palletsprojects.com/donate


Links
-----

-   Website: https://palletsprojects.com/p/click/
-   Documentation: https://click.palletsprojects.com/
-   Releases: https://pypi.org/project/click/
-   Code: https://github.com/pallets/click
-   Issue tracker: https://github.com/pallets/click/issues
-   Test status: https://dev.azure.com/pallets/click/_build
-   Official chat: https://discord.gg/t6rrQZH
40  matteo_env/Lib/site-packages/click-7.1.2.dist-info/RECORD  Normal file
@@ -0,0 +1,40 @@
(40 new lines: the wheel RECORD manifest for click 7.1.2 — one entry per installed file, from the dist-info metadata through click/*.py and their __pycache__ artifacts, each with its sha256 digest and size in bytes.)
6  matteo_env/Lib/site-packages/click-7.1.2.dist-info/WHEEL  Normal file
@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
1  matteo_env/Lib/site-packages/click-7.1.2.dist-info/top_level.txt  Normal file
@@ -0,0 +1 @@
click
79  matteo_env/Lib/site-packages/click/__init__.py  Normal file
@@ -0,0 +1,79 @@
"""
Click is a simple Python module inspired by the stdlib optparse to make
writing command line scripts fun. Unlike other modules, it's based
around a simple API that does not come with too much magic and is
composable.
"""
(79 new lines: the docstring above followed by re-exports of Click's public API — Command, Group, Option, Parameter and friends from .core; the command/group/option/argument decorators from .decorators; the exception hierarchy from .exceptions; HelpFormatter and wrap_text from .formatting; get_current_context from .globals; OptionParser from .parser; terminal helpers such as echo_via_pager, prompt, confirm, progressbar, style, secho and clear from .termui; the parameter types from .types; and echo plus the stream and file utilities from .utils — ending with disable_unicode_literals_warning = False and __version__ = "7.1.2".)
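Everything re-exported above is meant to be used straight off the ``click`` namespace. A minimal sketch (illustrative only, not code from this pull request) of a few of the terminal helpers named in that import block — ``echo``, ``style``, ``secho`` and ``confirm``:

.. code-block:: python

    import click

    # style() wraps text in ANSI codes; echo() writes it out and strips the
    # codes again when the output is not a terminal.
    click.echo(click.style("Game over!", fg="green", bold=True))

    # secho() combines style() and echo() in one call.
    click.secho("12 runs scored", fg="cyan")

    # confirm() asks a yes/no question on the terminal and returns a bool.
    if click.confirm("Start another game?"):
        click.echo("Play ball!")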
375  matteo_env/Lib/site-packages/click/_bashcomplete.py  Normal file
@@ -0,0 +1,375 @@
(375 new lines: Click 7.1.2's shell-completion module, committed verbatim as part of the matteo_env virtualenv. It holds the bash, zsh and fish completion-script templates; resolve_ctx(), which rebuilds the context chain from the arguments typed so far; start_of_option(), is_incomplete_option() and is_incomplete_argument(), which work out which parameter is still accepting values; get_user_autocompletions(), which either filters a Choice type's values or calls the parameter's autocompletion callback with ctx, args and the incomplete word; get_choices(), which merges option, argument and subcommand completions; and the do_complete()/do_complete_fish()/bashcomplete() entry points driven by the COMP_WORDS and COMP_CWORD environment variables.)
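As the module above shows, ``get_user_autocompletions()`` hands a parameter's ``autocompletion`` callback the context, the args typed so far, and the incomplete word, and accepts plain strings or ``(value, description)`` tuples back. A minimal sketch of wiring such a callback to an option under Click 7.x — the command name and the candidate list here are hypothetical, not part of this pull request:

.. code-block:: python

    import click

    # Hypothetical candidate values; in a real CLI these might come from a
    # config file or an API call.
    SIM_LEAGUES = ["internal", "discord", "custom"]


    def complete_league(ctx, args, incomplete):
        # Called by Click's completion machinery (see get_user_autocompletions
        # above). Returning (value, help) tuples instead of strings also works.
        return [name for name in SIM_LEAGUES if name.startswith(incomplete)]


    @click.command()
    @click.option("--league", autocompletion=complete_league)
    def standings(league):
        click.echo("Standings for {}".format(league))


    if __name__ == "__main__":
        standings()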
786  matteo_env/Lib/site-packages/click/_compat.py  Normal file
@@ -0,0 +1,786 @@
(786 new lines: Click 7.1.2's Python 2/3 and platform compatibility layer, committed verbatim. It detects the runtime (PY2, CYGWIN, MSYS2, App Engine, WIN); wraps the standard streams in _FixupStream/_NonClosingTextIOWrapper so they report a usable encoding; exposes get_binary_stdin/stdout/stderr and get_text_stdin/stdout/stderr plus the cached _default_text_* accessors; implements open_stream() with atomic-write support through _AtomicFile; provides strip_ansi(), should_strip_ansi() and isatty() helpers; and on Windows routes output through colorama via auto_wrap_for_ansi() and the _winconsole module.)
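These helpers surface in Click's public API as ``click.get_text_stream()`` and ``click.get_binary_stream()`` (both re-exported in ``__init__.py`` above). A small sketch of when to reach for them instead of ``sys.stdout`` directly:

.. code-block:: python

    import click

    # Text output goes through Click's wrapped stream, so a misconfigured
    # ASCII locale falls back to replacement characters instead of crashing.
    out = click.get_text_stream("stdout")
    out.write(u"line with non-ASCII text: ☂\n")

    # Raw bytes (for example downloaded file contents) use the binary
    # stream rather than the text wrapper.
    raw = click.get_binary_stream("stdout")
    raw.write(b"raw bytes\n")
    raw.flush()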
657  matteo_env/Lib/site-packages/click/_termui_impl.py  Normal file
@@ -0,0 +1,657 @@
(657 new lines: Click 7.1.2's terminal-UI implementation module, committed verbatim. The portion captured here covers _length_hint(); the ProgressBar class that backs click.progressbar() — ETA, percentage and bar rendering, terminal-width autodetection, and a generator that only works inside a with block; the pager machinery (pager(), _pipepager(), _tempfilepager(), _nullpager()); and the Editor class behind click.edit(), which launches $VISUAL/$EDITOR, or notepad/vi fallbacks, on a temporary file.)
if WIN:
|
||||
encoding = "utf-8-sig"
|
||||
text = text.replace("\n", "\r\n")
|
||||
else:
|
||||
encoding = "utf-8"
|
||||
text = text.encode(encoding)
|
||||
|
||||
f = os.fdopen(fd, "wb")
|
||||
f.write(text)
|
||||
f.close()
|
||||
timestamp = os.path.getmtime(name)
|
||||
|
||||
self.edit_file(name)
|
||||
|
||||
if self.require_save and os.path.getmtime(name) == timestamp:
|
||||
return None
|
||||
|
||||
f = open(name, "rb")
|
||||
try:
|
||||
rv = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
return rv.decode("utf-8-sig").replace("\r\n", "\n")
|
||||
finally:
|
||||
os.unlink(name)
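A minimal sketch (not part of the diff) of the public click.edit wrapper around the Editor class above; the MARKER text is only an illustrative placeholder.

import click

MARKER = "# Everything below this line is ignored\n"

message = click.edit("\n\n" + MARKER)
# edit() returns None when the temp file was not saved and require_save
# is left at its default, mirroring the timestamp check above.
if message is not None:
    print(message.split(MARKER, 1)[0].rstrip("\n"))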
|
||||
|
||||
|
||||
def open_url(url, wait=False, locate=False):
|
||||
import subprocess
|
||||
|
||||
def _unquote_file(url):
|
||||
try:
|
||||
import urllib
|
||||
except ImportError:
|
||||
import urllib
|
||||
if url.startswith("file://"):
|
||||
url = urllib.unquote(url[7:])
|
||||
return url
|
||||
|
||||
if sys.platform == "darwin":
|
||||
args = ["open"]
|
||||
if wait:
|
||||
args.append("-W")
|
||||
if locate:
|
||||
args.append("-R")
|
||||
args.append(_unquote_file(url))
|
||||
null = open("/dev/null", "w")
|
||||
try:
|
||||
return subprocess.Popen(args, stderr=null).wait()
|
||||
finally:
|
||||
null.close()
|
||||
elif WIN:
|
||||
if locate:
|
||||
url = _unquote_file(url)
|
||||
args = 'explorer /select,"{}"'.format(_unquote_file(url.replace('"', "")))
|
||||
else:
|
||||
args = 'start {} "" "{}"'.format(
|
||||
"/WAIT" if wait else "", url.replace('"', "")
|
||||
)
|
||||
return os.system(args)
|
||||
elif CYGWIN:
|
||||
if locate:
|
||||
url = _unquote_file(url)
|
||||
args = 'cygstart "{}"'.format(os.path.dirname(url).replace('"', ""))
|
||||
else:
|
||||
args = 'cygstart {} "{}"'.format("-w" if wait else "", url.replace('"', ""))
|
||||
return os.system(args)
|
||||
|
||||
try:
|
||||
if locate:
|
||||
url = os.path.dirname(_unquote_file(url)) or "."
|
||||
else:
|
||||
url = _unquote_file(url)
|
||||
c = subprocess.Popen(["xdg-open", url])
|
||||
if wait:
|
||||
return c.wait()
|
||||
return 0
|
||||
except OSError:
|
||||
if url.startswith(("http://", "https://")) and not locate and not wait:
|
||||
import webbrowser
|
||||
|
||||
webbrowser.open(url)
|
||||
return 0
|
||||
return 1
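Not in the diff: a short sketch of click.launch, the public wrapper around open_url(); the file path is hypothetical.

import click

# Opens the URL with the platform default handler (open/start/xdg-open).
click.launch("https://click.palletsprojects.com/")

# locate=True opens the containing folder in the file manager instead.
click.launch("/tmp/report.txt", locate=True)  # hypothetical path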
|
||||
|
||||
|
||||
def _translate_ch_to_exc(ch):
|
||||
if ch == u"\x03":
|
||||
raise KeyboardInterrupt()
|
||||
if ch == u"\x04" and not WIN: # Unix-like, Ctrl+D
|
||||
raise EOFError()
|
||||
if ch == u"\x1a" and WIN: # Windows, Ctrl+Z
|
||||
raise EOFError()
|
||||
|
||||
|
||||
if WIN:
|
||||
import msvcrt
|
||||
|
||||
@contextlib.contextmanager
|
||||
def raw_terminal():
|
||||
yield
|
||||
|
||||
def getchar(echo):
|
||||
# The function `getch` will return a bytes object corresponding to
|
||||
# the pressed character. Since Windows 10 build 1803, it will also
|
||||
# return \x00 when called a second time after pressing a regular key.
|
||||
#
|
||||
# `getwch` does not share this probably-bugged behavior. Moreover, it
|
||||
# returns a Unicode object by default, which is what we want.
|
||||
#
|
||||
# Either of these functions will return \x00 or \xe0 to indicate
|
||||
# a special key, and you need to call the same function again to get
|
||||
# the "rest" of the code. The fun part is that \u00e0 is
|
||||
# "latin small letter a with grave", so if you type that on a French
|
||||
# keyboard, you _also_ get a \xe0.
|
||||
# E.g., consider the Up arrow. This returns \xe0 and then \x48. The
|
||||
# resulting Unicode string reads as "a with grave" + "capital H".
|
||||
# This is indistinguishable from when the user actually types
|
||||
# "a with grave" and then "capital H".
|
||||
#
|
||||
# When \xe0 is returned, we assume it's part of a special-key sequence
|
||||
# and call `getwch` again, but that means that when the user types
|
||||
# the \u00e0 character, `getchar` doesn't return until a second
|
||||
# character is typed.
|
||||
# The alternative is returning immediately, but that would mess up
|
||||
# cross-platform handling of arrow keys and others that start with
|
||||
# \xe0. Another option is using `getch`, but then we can't reliably
|
||||
# read non-ASCII characters, because return values of `getch` are
|
||||
# limited to the current 8-bit codepage.
|
||||
#
|
||||
# Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
|
||||
# is doing the right thing in more situations than with `getch`.
|
||||
if echo:
|
||||
func = msvcrt.getwche
|
||||
else:
|
||||
func = msvcrt.getwch
|
||||
|
||||
rv = func()
|
||||
if rv in (u"\x00", u"\xe0"):
|
||||
# \x00 and \xe0 are control characters that indicate special key,
|
||||
# see above.
|
||||
rv += func()
|
||||
_translate_ch_to_exc(rv)
|
||||
return rv
|
||||
|
||||
|
||||
else:
|
||||
import tty
|
||||
import termios
|
||||
|
||||
@contextlib.contextmanager
|
||||
def raw_terminal():
|
||||
if not isatty(sys.stdin):
|
||||
f = open("/dev/tty")
|
||||
fd = f.fileno()
|
||||
else:
|
||||
fd = sys.stdin.fileno()
|
||||
f = None
|
||||
try:
|
||||
old_settings = termios.tcgetattr(fd)
|
||||
try:
|
||||
tty.setraw(fd)
|
||||
yield fd
|
||||
finally:
|
||||
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
|
||||
sys.stdout.flush()
|
||||
if f is not None:
|
||||
f.close()
|
||||
except termios.error:
|
||||
pass
|
||||
|
||||
def getchar(echo):
|
||||
with raw_terminal() as fd:
|
||||
ch = os.read(fd, 32)
|
||||
ch = ch.decode(get_best_encoding(sys.stdin), "replace")
|
||||
if echo and isatty(sys.stdout):
|
||||
sys.stdout.write(ch)
|
||||
_translate_ch_to_exc(ch)
|
||||
return ch
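A usage sketch (not part of the vendored file): click.getchar is the public wrapper over the platform-specific getchar implementations above.

import click

click.echo("Continue? [y/n] ", nl=False)
ch = click.getchar()  # single keypress, no Enter required
click.echo(ch)
if ch.lower() == "y":
    click.echo("Continuing...")
else:
    click.echo("Aborted!")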
|
37
matteo_env/Lib/site-packages/click/_textwrap.py
Normal file
|
@ -0,0 +1,37 @@
|
|||
import textwrap
|
||||
from contextlib import contextmanager
|
||||
|
||||
|
||||
class TextWrapper(textwrap.TextWrapper):
|
||||
def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
|
||||
space_left = max(width - cur_len, 1)
|
||||
|
||||
if self.break_long_words:
|
||||
last = reversed_chunks[-1]
|
||||
cut = last[:space_left]
|
||||
res = last[space_left:]
|
||||
cur_line.append(cut)
|
||||
reversed_chunks[-1] = res
|
||||
elif not cur_line:
|
||||
cur_line.append(reversed_chunks.pop())
|
||||
|
||||
@contextmanager
|
||||
def extra_indent(self, indent):
|
||||
old_initial_indent = self.initial_indent
|
||||
old_subsequent_indent = self.subsequent_indent
|
||||
self.initial_indent += indent
|
||||
self.subsequent_indent += indent
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.initial_indent = old_initial_indent
|
||||
self.subsequent_indent = old_subsequent_indent
|
||||
|
||||
def indent_only(self, text):
|
||||
rv = []
|
||||
for idx, line in enumerate(text.splitlines()):
|
||||
indent = self.initial_indent
|
||||
if idx > 0:
|
||||
indent = self.subsequent_indent
|
||||
rv.append(indent + line)
|
||||
return "\n".join(rv)
|
131
matteo_env/Lib/site-packages/click/_unicodefun.py
Normal file
|
@ -0,0 +1,131 @@
|
|||
import codecs
|
||||
import os
|
||||
import sys
|
||||
|
||||
from ._compat import PY2
|
||||
|
||||
|
||||
def _find_unicode_literals_frame():
|
||||
import __future__
|
||||
|
||||
if not hasattr(sys, "_getframe"): # not all Python implementations have it
|
||||
return 0
|
||||
frm = sys._getframe(1)
|
||||
idx = 1
|
||||
while frm is not None:
|
||||
if frm.f_globals.get("__name__", "").startswith("click."):
|
||||
frm = frm.f_back
|
||||
idx += 1
|
||||
elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag:
|
||||
return idx
|
||||
else:
|
||||
break
|
||||
return 0
|
||||
|
||||
|
||||
def _check_for_unicode_literals():
|
||||
if not __debug__:
|
||||
return
|
||||
|
||||
from . import disable_unicode_literals_warning
|
||||
|
||||
if not PY2 or disable_unicode_literals_warning:
|
||||
return
|
||||
bad_frame = _find_unicode_literals_frame()
|
||||
if bad_frame <= 0:
|
||||
return
|
||||
from warnings import warn
|
||||
|
||||
warn(
|
||||
Warning(
|
||||
"Click detected the use of the unicode_literals __future__"
|
||||
" import. This is heavily discouraged because it can"
|
||||
" introduce subtle bugs in your code. You should instead"
|
||||
' use explicit u"" literals for your unicode strings. For'
|
||||
" more information see"
|
||||
" https://click.palletsprojects.com/python3/"
|
||||
),
|
||||
stacklevel=bad_frame,
|
||||
)
|
||||
|
||||
|
||||
def _verify_python3_env():
|
||||
"""Ensures that the environment is good for unicode on Python 3."""
|
||||
if PY2:
|
||||
return
|
||||
try:
|
||||
import locale
|
||||
|
||||
fs_enc = codecs.lookup(locale.getpreferredencoding()).name
|
||||
except Exception:
|
||||
fs_enc = "ascii"
|
||||
if fs_enc != "ascii":
|
||||
return
|
||||
|
||||
extra = ""
|
||||
if os.name == "posix":
|
||||
import subprocess
|
||||
|
||||
try:
|
||||
rv = subprocess.Popen(
|
||||
["locale", "-a"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||
).communicate()[0]
|
||||
except OSError:
|
||||
rv = b""
|
||||
good_locales = set()
|
||||
has_c_utf8 = False
|
||||
|
||||
# Make sure we're operating on text here.
|
||||
if isinstance(rv, bytes):
|
||||
rv = rv.decode("ascii", "replace")
|
||||
|
||||
for line in rv.splitlines():
|
||||
locale = line.strip()
|
||||
if locale.lower().endswith((".utf-8", ".utf8")):
|
||||
good_locales.add(locale)
|
||||
if locale.lower() in ("c.utf8", "c.utf-8"):
|
||||
has_c_utf8 = True
|
||||
|
||||
extra += "\n\n"
|
||||
if not good_locales:
|
||||
extra += (
|
||||
"Additional information: on this system no suitable"
|
||||
" UTF-8 locales were discovered. This most likely"
|
||||
" requires resolving by reconfiguring the locale"
|
||||
" system."
|
||||
)
|
||||
elif has_c_utf8:
|
||||
extra += (
|
||||
"This system supports the C.UTF-8 locale which is"
|
||||
" recommended. You might be able to resolve your issue"
|
||||
" by exporting the following environment variables:\n\n"
|
||||
" export LC_ALL=C.UTF-8\n"
|
||||
" export LANG=C.UTF-8"
|
||||
)
|
||||
else:
|
||||
extra += (
|
||||
"This system lists a couple of UTF-8 supporting locales"
|
||||
" that you can pick from. The following suitable"
|
||||
" locales were discovered: {}".format(", ".join(sorted(good_locales)))
|
||||
)
|
||||
|
||||
bad_locale = None
|
||||
for locale in os.environ.get("LC_ALL"), os.environ.get("LANG"):
|
||||
if locale and locale.lower().endswith((".utf-8", ".utf8")):
|
||||
bad_locale = locale
|
||||
if locale is not None:
|
||||
break
|
||||
if bad_locale is not None:
|
||||
extra += (
|
||||
"\n\nClick discovered that you exported a UTF-8 locale"
|
||||
" but the locale system could not pick up from it"
|
||||
" because it does not exist. The exported locale is"
|
||||
" '{}' but it is not supported".format(bad_locale)
|
||||
)
|
||||
|
||||
raise RuntimeError(
|
||||
"Click will abort further execution because Python 3 was"
|
||||
" configured to use ASCII as encoding for the environment."
|
||||
" Consult https://click.palletsprojects.com/python3/ for"
|
||||
" mitigation steps.{}".format(extra)
|
||||
)
|
370
matteo_env/Lib/site-packages/click/_winconsole.py
Normal file
|
@ -0,0 +1,370 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# This module is based on the excellent work by Adam Bartoš who
|
||||
# provided a lot of what went into the implementation here in
|
||||
# the discussion to issue1602 in the Python bug tracker.
|
||||
#
|
||||
# There are some general differences in regards to how this works
|
||||
# compared to the original patches as we do not need to patch
|
||||
# the entire interpreter but just work in our little world of
|
||||
# echo and prompt.
|
||||
import ctypes
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import zlib
|
||||
from ctypes import byref
|
||||
from ctypes import c_char
|
||||
from ctypes import c_char_p
|
||||
from ctypes import c_int
|
||||
from ctypes import c_ssize_t
|
||||
from ctypes import c_ulong
|
||||
from ctypes import c_void_p
|
||||
from ctypes import POINTER
|
||||
from ctypes import py_object
|
||||
from ctypes import windll
|
||||
from ctypes import WinError
|
||||
from ctypes import WINFUNCTYPE
|
||||
from ctypes.wintypes import DWORD
|
||||
from ctypes.wintypes import HANDLE
|
||||
from ctypes.wintypes import LPCWSTR
|
||||
from ctypes.wintypes import LPWSTR
|
||||
|
||||
import msvcrt
|
||||
|
||||
from ._compat import _NonClosingTextIOWrapper
|
||||
from ._compat import PY2
|
||||
from ._compat import text_type
|
||||
|
||||
try:
|
||||
from ctypes import pythonapi
|
||||
|
||||
PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
|
||||
PyBuffer_Release = pythonapi.PyBuffer_Release
|
||||
except ImportError:
|
||||
pythonapi = None
|
||||
|
||||
|
||||
c_ssize_p = POINTER(c_ssize_t)
|
||||
|
||||
kernel32 = windll.kernel32
|
||||
GetStdHandle = kernel32.GetStdHandle
|
||||
ReadConsoleW = kernel32.ReadConsoleW
|
||||
WriteConsoleW = kernel32.WriteConsoleW
|
||||
GetConsoleMode = kernel32.GetConsoleMode
|
||||
GetLastError = kernel32.GetLastError
|
||||
GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
|
||||
CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
|
||||
("CommandLineToArgvW", windll.shell32)
|
||||
)
|
||||
LocalFree = WINFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p)(
|
||||
("LocalFree", windll.kernel32)
|
||||
)
|
||||
|
||||
|
||||
STDIN_HANDLE = GetStdHandle(-10)
|
||||
STDOUT_HANDLE = GetStdHandle(-11)
|
||||
STDERR_HANDLE = GetStdHandle(-12)
|
||||
|
||||
|
||||
PyBUF_SIMPLE = 0
|
||||
PyBUF_WRITABLE = 1
|
||||
|
||||
ERROR_SUCCESS = 0
|
||||
ERROR_NOT_ENOUGH_MEMORY = 8
|
||||
ERROR_OPERATION_ABORTED = 995
|
||||
|
||||
STDIN_FILENO = 0
|
||||
STDOUT_FILENO = 1
|
||||
STDERR_FILENO = 2
|
||||
|
||||
EOF = b"\x1a"
|
||||
MAX_BYTES_WRITTEN = 32767
|
||||
|
||||
|
||||
class Py_buffer(ctypes.Structure):
|
||||
_fields_ = [
|
||||
("buf", c_void_p),
|
||||
("obj", py_object),
|
||||
("len", c_ssize_t),
|
||||
("itemsize", c_ssize_t),
|
||||
("readonly", c_int),
|
||||
("ndim", c_int),
|
||||
("format", c_char_p),
|
||||
("shape", c_ssize_p),
|
||||
("strides", c_ssize_p),
|
||||
("suboffsets", c_ssize_p),
|
||||
("internal", c_void_p),
|
||||
]
|
||||
|
||||
if PY2:
|
||||
_fields_.insert(-1, ("smalltable", c_ssize_t * 2))
|
||||
|
||||
|
||||
# On PyPy we cannot get buffers so our ability to operate here is
|
||||
# severely limited.
|
||||
if pythonapi is None:
|
||||
get_buffer = None
|
||||
else:
|
||||
|
||||
def get_buffer(obj, writable=False):
|
||||
buf = Py_buffer()
|
||||
flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
|
||||
PyObject_GetBuffer(py_object(obj), byref(buf), flags)
|
||||
try:
|
||||
buffer_type = c_char * buf.len
|
||||
return buffer_type.from_address(buf.buf)
|
||||
finally:
|
||||
PyBuffer_Release(byref(buf))
|
||||
|
||||
|
||||
class _WindowsConsoleRawIOBase(io.RawIOBase):
|
||||
def __init__(self, handle):
|
||||
self.handle = handle
|
||||
|
||||
def isatty(self):
|
||||
io.RawIOBase.isatty(self)
|
||||
return True
|
||||
|
||||
|
||||
class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
|
||||
def readable(self):
|
||||
return True
|
||||
|
||||
def readinto(self, b):
|
||||
bytes_to_be_read = len(b)
|
||||
if not bytes_to_be_read:
|
||||
return 0
|
||||
elif bytes_to_be_read % 2:
|
||||
raise ValueError(
|
||||
"cannot read odd number of bytes from UTF-16-LE encoded console"
|
||||
)
|
||||
|
||||
buffer = get_buffer(b, writable=True)
|
||||
code_units_to_be_read = bytes_to_be_read // 2
|
||||
code_units_read = c_ulong()
|
||||
|
||||
rv = ReadConsoleW(
|
||||
HANDLE(self.handle),
|
||||
buffer,
|
||||
code_units_to_be_read,
|
||||
byref(code_units_read),
|
||||
None,
|
||||
)
|
||||
if GetLastError() == ERROR_OPERATION_ABORTED:
|
||||
# wait for KeyboardInterrupt
|
||||
time.sleep(0.1)
|
||||
if not rv:
|
||||
raise OSError("Windows error: {}".format(GetLastError()))
|
||||
|
||||
if buffer[0] == EOF:
|
||||
return 0
|
||||
return 2 * code_units_read.value
|
||||
|
||||
|
||||
class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
|
||||
def writable(self):
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _get_error_message(errno):
|
||||
if errno == ERROR_SUCCESS:
|
||||
return "ERROR_SUCCESS"
|
||||
elif errno == ERROR_NOT_ENOUGH_MEMORY:
|
||||
return "ERROR_NOT_ENOUGH_MEMORY"
|
||||
return "Windows error {}".format(errno)
|
||||
|
||||
def write(self, b):
|
||||
bytes_to_be_written = len(b)
|
||||
buf = get_buffer(b)
|
||||
code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
|
||||
code_units_written = c_ulong()
|
||||
|
||||
WriteConsoleW(
|
||||
HANDLE(self.handle),
|
||||
buf,
|
||||
code_units_to_be_written,
|
||||
byref(code_units_written),
|
||||
None,
|
||||
)
|
||||
bytes_written = 2 * code_units_written.value
|
||||
|
||||
if bytes_written == 0 and bytes_to_be_written > 0:
|
||||
raise OSError(self._get_error_message(GetLastError()))
|
||||
return bytes_written
|
||||
|
||||
|
||||
class ConsoleStream(object):
|
||||
def __init__(self, text_stream, byte_stream):
|
||||
self._text_stream = text_stream
|
||||
self.buffer = byte_stream
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.buffer.name
|
||||
|
||||
def write(self, x):
|
||||
if isinstance(x, text_type):
|
||||
return self._text_stream.write(x)
|
||||
try:
|
||||
self.flush()
|
||||
except Exception:
|
||||
pass
|
||||
return self.buffer.write(x)
|
||||
|
||||
def writelines(self, lines):
|
||||
for line in lines:
|
||||
self.write(line)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._text_stream, name)
|
||||
|
||||
def isatty(self):
|
||||
return self.buffer.isatty()
|
||||
|
||||
def __repr__(self):
|
||||
return "<ConsoleStream name={!r} encoding={!r}>".format(
|
||||
self.name, self.encoding
|
||||
)
|
||||
|
||||
|
||||
class WindowsChunkedWriter(object):
|
||||
"""
|
||||
Wraps a stream (such as stdout), acting as a transparent proxy for all
|
||||
attribute access apart from method 'write()' which we wrap to write in
|
||||
limited chunks due to a Windows limitation on binary console streams.
|
||||
"""
|
||||
|
||||
def __init__(self, wrapped):
|
||||
# double-underscore everything to prevent clashes with names of
|
||||
# attributes on the wrapped stream object.
|
||||
self.__wrapped = wrapped
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.__wrapped, name)
|
||||
|
||||
def write(self, text):
|
||||
total_to_write = len(text)
|
||||
written = 0
|
||||
|
||||
while written < total_to_write:
|
||||
to_write = min(total_to_write - written, MAX_BYTES_WRITTEN)
|
||||
self.__wrapped.write(text[written : written + to_write])
|
||||
written += to_write
|
||||
|
||||
|
||||
_wrapped_std_streams = set()
|
||||
|
||||
|
||||
def _wrap_std_stream(name):
|
||||
# Python 2 & Windows 7 and below
|
||||
if (
|
||||
PY2
|
||||
and sys.getwindowsversion()[:2] <= (6, 1)
|
||||
and name not in _wrapped_std_streams
|
||||
):
|
||||
setattr(sys, name, WindowsChunkedWriter(getattr(sys, name)))
|
||||
_wrapped_std_streams.add(name)
|
||||
|
||||
|
||||
def _get_text_stdin(buffer_stream):
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
|
||||
"utf-16-le",
|
||||
"strict",
|
||||
line_buffering=True,
|
||||
)
|
||||
return ConsoleStream(text_stream, buffer_stream)
|
||||
|
||||
|
||||
def _get_text_stdout(buffer_stream):
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
|
||||
"utf-16-le",
|
||||
"strict",
|
||||
line_buffering=True,
|
||||
)
|
||||
return ConsoleStream(text_stream, buffer_stream)
|
||||
|
||||
|
||||
def _get_text_stderr(buffer_stream):
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
|
||||
"utf-16-le",
|
||||
"strict",
|
||||
line_buffering=True,
|
||||
)
|
||||
return ConsoleStream(text_stream, buffer_stream)
|
||||
|
||||
|
||||
if PY2:
|
||||
|
||||
def _hash_py_argv():
|
||||
return zlib.crc32("\x00".join(sys.argv[1:]))
|
||||
|
||||
_initial_argv_hash = _hash_py_argv()
|
||||
|
||||
def _get_windows_argv():
|
||||
argc = c_int(0)
|
||||
argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
|
||||
if not argv_unicode:
|
||||
raise WinError()
|
||||
try:
|
||||
argv = [argv_unicode[i] for i in range(0, argc.value)]
|
||||
finally:
|
||||
LocalFree(argv_unicode)
|
||||
del argv_unicode
|
||||
|
||||
if not hasattr(sys, "frozen"):
|
||||
argv = argv[1:]
|
||||
while len(argv) > 0:
|
||||
arg = argv[0]
|
||||
if not arg.startswith("-") or arg == "-":
|
||||
break
|
||||
argv = argv[1:]
|
||||
if arg.startswith(("-c", "-m")):
|
||||
break
|
||||
|
||||
return argv[1:]
|
||||
|
||||
|
||||
_stream_factories = {
|
||||
0: _get_text_stdin,
|
||||
1: _get_text_stdout,
|
||||
2: _get_text_stderr,
|
||||
}
|
||||
|
||||
|
||||
def _is_console(f):
|
||||
if not hasattr(f, "fileno"):
|
||||
return False
|
||||
|
||||
try:
|
||||
fileno = f.fileno()
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
handle = msvcrt.get_osfhandle(fileno)
|
||||
return bool(GetConsoleMode(handle, byref(DWORD())))
|
||||
|
||||
|
||||
def _get_windows_console_stream(f, encoding, errors):
|
||||
if (
|
||||
get_buffer is not None
|
||||
and encoding in ("utf-16-le", None)
|
||||
and errors in ("strict", None)
|
||||
and _is_console(f)
|
||||
):
|
||||
func = _stream_factories.get(f.fileno())
|
||||
if func is not None:
|
||||
if not PY2:
|
||||
f = getattr(f, "buffer", None)
|
||||
if f is None:
|
||||
return None
|
||||
else:
|
||||
# If we are on Python 2 we need to set the stream that we
|
||||
# deal with to binary mode as otherwise the exercise is a
|
||||
# bit moot. The same problems apply as for
|
||||
# get_binary_stdin and friends from _compat.
|
||||
msvcrt.setmode(f.fileno(), os.O_BINARY)
|
||||
return func(f)
|
2030
matteo_env/Lib/site-packages/click/core.py
Normal file
File diff suppressed because it is too large
333
matteo_env/Lib/site-packages/click/decorators.py
Normal file
|
@ -0,0 +1,333 @@
|
|||
import inspect
|
||||
import sys
|
||||
from functools import update_wrapper
|
||||
|
||||
from ._compat import iteritems
|
||||
from ._unicodefun import _check_for_unicode_literals
|
||||
from .core import Argument
|
||||
from .core import Command
|
||||
from .core import Group
|
||||
from .core import Option
|
||||
from .globals import get_current_context
|
||||
from .utils import echo
|
||||
|
||||
|
||||
def pass_context(f):
|
||||
"""Marks a callback as wanting to receive the current context
|
||||
object as first argument.
|
||||
"""
|
||||
|
||||
def new_func(*args, **kwargs):
|
||||
return f(get_current_context(), *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
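A minimal sketch (not in the diff) showing pass_context in use; the option and the ctx.obj layout are illustrative only.

import click


@click.group()
@click.option("--verbose", is_flag=True)
@click.pass_context
def cli(ctx, verbose):
    # The Context is injected as the first argument; ctx.obj is a
    # conventional place for state shared with subcommands.
    ctx.obj = {"verbose": verbose}


@cli.command()
@click.pass_context
def status(ctx):
    if ctx.obj["verbose"]:
        click.echo("verbose status")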
|
||||
|
||||
|
||||
def pass_obj(f):
|
||||
"""Similar to :func:`pass_context`, but only pass the object on the
|
||||
context onwards (:attr:`Context.obj`). This is useful if that object
|
||||
represents the state of a nested system.
|
||||
"""
|
||||
|
||||
def new_func(*args, **kwargs):
|
||||
return f(get_current_context().obj, *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
|
||||
|
||||
|
||||
def make_pass_decorator(object_type, ensure=False):
|
||||
"""Given an object type this creates a decorator that will work
|
||||
similarly to :func:`pass_obj` but instead of passing the object of the
|
||||
current context, it will find the innermost context of type
|
||||
:func:`object_type`.
|
||||
|
||||
This generates a decorator that works roughly like this::
|
||||
|
||||
from functools import update_wrapper
|
||||
|
||||
def decorator(f):
|
||||
@pass_context
|
||||
def new_func(ctx, *args, **kwargs):
|
||||
obj = ctx.find_object(object_type)
|
||||
return ctx.invoke(f, obj, *args, **kwargs)
|
||||
return update_wrapper(new_func, f)
|
||||
return decorator
|
||||
|
||||
:param object_type: the type of the object to pass.
|
||||
:param ensure: if set to `True`, a new object will be created and
|
||||
remembered on the context if it's not there yet.
|
||||
"""
|
||||
|
||||
def decorator(f):
|
||||
def new_func(*args, **kwargs):
|
||||
ctx = get_current_context()
|
||||
if ensure:
|
||||
obj = ctx.ensure_object(object_type)
|
||||
else:
|
||||
obj = ctx.find_object(object_type)
|
||||
if obj is None:
|
||||
raise RuntimeError(
|
||||
"Managed to invoke callback without a context"
|
||||
" object of type '{}' existing".format(object_type.__name__)
|
||||
)
|
||||
return ctx.invoke(f, obj, *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
|
||||
|
||||
return decorator
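Not part of the diff: a sketch of make_pass_decorator following the pattern from the click documentation; Repo is a placeholder class.

import click


class Repo(object):
    def __init__(self, home="."):
        self.home = home


# ensure=True creates and remembers a Repo on the context if none exists yet.
pass_repo = click.make_pass_decorator(Repo, ensure=True)


@click.group()
def cli():
    pass


@cli.command()
@pass_repo
def show(repo):
    click.echo(repo.home)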
|
||||
|
||||
|
||||
def _make_command(f, name, attrs, cls):
|
||||
if isinstance(f, Command):
|
||||
raise TypeError("Attempted to convert a callback into a command twice.")
|
||||
try:
|
||||
params = f.__click_params__
|
||||
params.reverse()
|
||||
del f.__click_params__
|
||||
except AttributeError:
|
||||
params = []
|
||||
help = attrs.get("help")
|
||||
if help is None:
|
||||
help = inspect.getdoc(f)
|
||||
if isinstance(help, bytes):
|
||||
help = help.decode("utf-8")
|
||||
else:
|
||||
help = inspect.cleandoc(help)
|
||||
attrs["help"] = help
|
||||
_check_for_unicode_literals()
|
||||
return cls(
|
||||
name=name or f.__name__.lower().replace("_", "-"),
|
||||
callback=f,
|
||||
params=params,
|
||||
**attrs
|
||||
)
|
||||
|
||||
|
||||
def command(name=None, cls=None, **attrs):
|
||||
r"""Creates a new :class:`Command` and uses the decorated function as
|
||||
callback. This will also automatically attach all decorated
|
||||
:func:`option`\s and :func:`argument`\s as parameters to the command.
|
||||
|
||||
The name of the command defaults to the name of the function with
|
||||
underscores replaced by dashes. If you want to change that, you can
|
||||
pass the intended name as the first argument.
|
||||
|
||||
All keyword arguments are forwarded to the underlying command class.
|
||||
|
||||
Once decorated the function turns into a :class:`Command` instance
|
||||
that can be invoked as a command line utility or be attached to a
|
||||
command :class:`Group`.
|
||||
|
||||
:param name: the name of the command. This defaults to the function
|
||||
name with underscores replaced by dashes.
|
||||
:param cls: the command class to instantiate. This defaults to
|
||||
:class:`Command`.
|
||||
"""
|
||||
if cls is None:
|
||||
cls = Command
|
||||
|
||||
def decorator(f):
|
||||
cmd = _make_command(f, name, attrs, cls)
|
||||
cmd.__doc__ = f.__doc__
|
||||
return cmd
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def group(name=None, **attrs):
|
||||
"""Creates a new :class:`Group` with a function as callback. This
|
||||
works otherwise the same as :func:`command` just that the `cls`
|
||||
parameter is set to :class:`Group`.
|
||||
"""
|
||||
attrs.setdefault("cls", Group)
|
||||
return command(name, **attrs)
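A short sketch (not in the diff) of command() and group() together; note the underscore-to-dash renaming performed by _make_command above.

import click


@click.group()
def cli():
    """Top-level entry point."""


@cli.command()  # exposed as "sync-data" on the command line
def sync_data():
    click.echo("syncing")


if __name__ == "__main__":
    cli()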
|
||||
|
||||
|
||||
def _param_memo(f, param):
|
||||
if isinstance(f, Command):
|
||||
f.params.append(param)
|
||||
else:
|
||||
if not hasattr(f, "__click_params__"):
|
||||
f.__click_params__ = []
|
||||
f.__click_params__.append(param)
|
||||
|
||||
|
||||
def argument(*param_decls, **attrs):
|
||||
"""Attaches an argument to the command. All positional arguments are
|
||||
passed as parameter declarations to :class:`Argument`; all keyword
|
||||
arguments are forwarded unchanged (except ``cls``).
|
||||
This is equivalent to creating an :class:`Argument` instance manually
|
||||
and attaching it to the :attr:`Command.params` list.
|
||||
|
||||
:param cls: the argument class to instantiate. This defaults to
|
||||
:class:`Argument`.
|
||||
"""
|
||||
|
||||
def decorator(f):
|
||||
ArgumentClass = attrs.pop("cls", Argument)
|
||||
_param_memo(f, ArgumentClass(param_decls, **attrs))
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def option(*param_decls, **attrs):
|
||||
"""Attaches an option to the command. All positional arguments are
|
||||
passed as parameter declarations to :class:`Option`; all keyword
|
||||
arguments are forwarded unchanged (except ``cls``).
|
||||
This is equivalent to creating an :class:`Option` instance manually
|
||||
and attaching it to the :attr:`Command.params` list.
|
||||
|
||||
:param cls: the option class to instantiate. This defaults to
|
||||
:class:`Option`.
|
||||
"""
|
||||
|
||||
def decorator(f):
|
||||
# Issue 926, copy attrs, so pre-defined options can re-use the same cls=
|
||||
option_attrs = attrs.copy()
|
||||
|
||||
if "help" in option_attrs:
|
||||
option_attrs["help"] = inspect.cleandoc(option_attrs["help"])
|
||||
OptionClass = option_attrs.pop("cls", Option)
|
||||
_param_memo(f, OptionClass(param_decls, **option_attrs))
|
||||
return f
|
||||
|
||||
return decorator
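Not part of the diff: a minimal sketch combining argument() and option(); the names and defaults are placeholders.

import click


@click.command()
@click.argument("src")
@click.option("--count", default=1, show_default=True, help="Repetitions.")
def copy(src, count):
    """Print SRC a number of times."""
    for _ in range(count):
        click.echo(src)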
|
||||
|
||||
|
||||
def confirmation_option(*param_decls, **attrs):
|
||||
"""Shortcut for confirmation prompts that can be ignored by passing
|
||||
``--yes`` as parameter.
|
||||
|
||||
This is equivalent to decorating a function with :func:`option` with
|
||||
the following parameters::
|
||||
|
||||
def callback(ctx, param, value):
|
||||
if not value:
|
||||
ctx.abort()
|
||||
|
||||
@click.command()
|
||||
@click.option('--yes', is_flag=True, callback=callback,
|
||||
expose_value=False, prompt='Do you want to continue?')
|
||||
def dropdb():
|
||||
pass
|
||||
"""
|
||||
|
||||
def decorator(f):
|
||||
def callback(ctx, param, value):
|
||||
if not value:
|
||||
ctx.abort()
|
||||
|
||||
attrs.setdefault("is_flag", True)
|
||||
attrs.setdefault("callback", callback)
|
||||
attrs.setdefault("expose_value", False)
|
||||
attrs.setdefault("prompt", "Do you want to continue?")
|
||||
attrs.setdefault("help", "Confirm the action without prompting.")
|
||||
return option(*(param_decls or ("--yes",)), **attrs)(f)
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def password_option(*param_decls, **attrs):
|
||||
"""Shortcut for password prompts.
|
||||
|
||||
This is equivalent to decorating a function with :func:`option` with
|
||||
the following parameters::
|
||||
|
||||
@click.command()
|
||||
@click.option('--password', prompt=True, confirmation_prompt=True,
|
||||
hide_input=True)
|
||||
def changeadmin(password):
|
||||
pass
|
||||
"""
|
||||
|
||||
def decorator(f):
|
||||
attrs.setdefault("prompt", True)
|
||||
attrs.setdefault("confirmation_prompt", True)
|
||||
attrs.setdefault("hide_input", True)
|
||||
return option(*(param_decls or ("--password",)), **attrs)(f)
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def version_option(version=None, *param_decls, **attrs):
|
||||
"""Adds a ``--version`` option which immediately ends the program
|
||||
printing out the version number. This is implemented as an eager
|
||||
option that prints the version and exits the program in the callback.
|
||||
|
||||
:param version: the version number to show. If not provided Click
|
||||
attempts an auto discovery via setuptools.
|
||||
:param prog_name: the name of the program (defaults to autodetection)
|
||||
:param message: custom message to show instead of the default
|
||||
(``'%(prog)s, version %(version)s'``)
|
||||
:param others: everything else is forwarded to :func:`option`.
|
||||
"""
|
||||
if version is None:
|
||||
if hasattr(sys, "_getframe"):
|
||||
module = sys._getframe(1).f_globals.get("__name__")
|
||||
else:
|
||||
module = ""
|
||||
|
||||
def decorator(f):
|
||||
prog_name = attrs.pop("prog_name", None)
|
||||
message = attrs.pop("message", "%(prog)s, version %(version)s")
|
||||
|
||||
def callback(ctx, param, value):
|
||||
if not value or ctx.resilient_parsing:
|
||||
return
|
||||
prog = prog_name
|
||||
if prog is None:
|
||||
prog = ctx.find_root().info_name
|
||||
ver = version
|
||||
if ver is None:
|
||||
try:
|
||||
import pkg_resources
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
for dist in pkg_resources.working_set:
|
||||
scripts = dist.get_entry_map().get("console_scripts") or {}
|
||||
for _, entry_point in iteritems(scripts):
|
||||
if entry_point.module_name == module:
|
||||
ver = dist.version
|
||||
break
|
||||
if ver is None:
|
||||
raise RuntimeError("Could not determine version")
|
||||
echo(message % {"prog": prog, "version": ver}, color=ctx.color)
|
||||
ctx.exit()
|
||||
|
||||
attrs.setdefault("is_flag", True)
|
||||
attrs.setdefault("expose_value", False)
|
||||
attrs.setdefault("is_eager", True)
|
||||
attrs.setdefault("help", "Show the version and exit.")
|
||||
attrs["callback"] = callback
|
||||
return option(*(param_decls or ("--version",)), **attrs)(f)
|
||||
|
||||
return decorator
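A hedged sketch (not in the diff): passing an explicit version skips the setuptools lookup in the callback above; the version and prog_name values are placeholders.

import click


@click.command()
@click.version_option(version="0.1.0", prog_name="mytool")
def cli():
    click.echo("running")


# "mytool --version" would print "mytool, version 0.1.0" and exit.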
|
||||
|
||||
|
||||
def help_option(*param_decls, **attrs):
|
||||
"""Adds a ``--help`` option which immediately ends the program
|
||||
printing out the help page. This is usually unnecessary to add as
|
||||
this is added by default to all commands unless suppressed.
|
||||
|
||||
Like :func:`version_option`, this is implemented as eager option that
|
||||
prints in the callback and exits.
|
||||
|
||||
All arguments are forwarded to :func:`option`.
|
||||
"""
|
||||
|
||||
def decorator(f):
|
||||
def callback(ctx, param, value):
|
||||
if value and not ctx.resilient_parsing:
|
||||
echo(ctx.get_help(), color=ctx.color)
|
||||
ctx.exit()
|
||||
|
||||
attrs.setdefault("is_flag", True)
|
||||
attrs.setdefault("expose_value", False)
|
||||
attrs.setdefault("help", "Show this message and exit.")
|
||||
attrs.setdefault("is_eager", True)
|
||||
attrs["callback"] = callback
|
||||
return option(*(param_decls or ("--help",)), **attrs)(f)
|
||||
|
||||
return decorator
|
253
matteo_env/Lib/site-packages/click/exceptions.py
Normal file
|
@ -0,0 +1,253 @@
|
|||
from ._compat import filename_to_ui
|
||||
from ._compat import get_text_stderr
|
||||
from ._compat import PY2
|
||||
from .utils import echo
|
||||
|
||||
|
||||
def _join_param_hints(param_hint):
|
||||
if isinstance(param_hint, (tuple, list)):
|
||||
return " / ".join(repr(x) for x in param_hint)
|
||||
return param_hint
|
||||
|
||||
|
||||
class ClickException(Exception):
|
||||
"""An exception that Click can handle and show to the user."""
|
||||
|
||||
#: The exit code for this exception
|
||||
exit_code = 1
|
||||
|
||||
def __init__(self, message):
|
||||
ctor_msg = message
|
||||
if PY2:
|
||||
if ctor_msg is not None:
|
||||
ctor_msg = ctor_msg.encode("utf-8")
|
||||
Exception.__init__(self, ctor_msg)
|
||||
self.message = message
|
||||
|
||||
def format_message(self):
|
||||
return self.message
|
||||
|
||||
def __str__(self):
|
||||
return self.message
|
||||
|
||||
if PY2:
|
||||
__unicode__ = __str__
|
||||
|
||||
def __str__(self):
|
||||
return self.message.encode("utf-8")
|
||||
|
||||
def show(self, file=None):
|
||||
if file is None:
|
||||
file = get_text_stderr()
|
||||
echo("Error: {}".format(self.format_message()), file=file)
|
||||
|
||||
|
||||
class UsageError(ClickException):
|
||||
"""An internal exception that signals a usage error. This typically
|
||||
aborts any further handling.
|
||||
|
||||
:param message: the error message to display.
|
||||
:param ctx: optionally the context that caused this error. Click will
|
||||
fill in the context automatically in some situations.
|
||||
"""
|
||||
|
||||
exit_code = 2
|
||||
|
||||
def __init__(self, message, ctx=None):
|
||||
ClickException.__init__(self, message)
|
||||
self.ctx = ctx
|
||||
self.cmd = self.ctx.command if self.ctx else None
|
||||
|
||||
def show(self, file=None):
|
||||
if file is None:
|
||||
file = get_text_stderr()
|
||||
color = None
|
||||
hint = ""
|
||||
if self.cmd is not None and self.cmd.get_help_option(self.ctx) is not None:
|
||||
hint = "Try '{} {}' for help.\n".format(
|
||||
self.ctx.command_path, self.ctx.help_option_names[0]
|
||||
)
|
||||
if self.ctx is not None:
|
||||
color = self.ctx.color
|
||||
echo("{}\n{}".format(self.ctx.get_usage(), hint), file=file, color=color)
|
||||
echo("Error: {}".format(self.format_message()), file=file, color=color)
|
||||
|
||||
|
||||
class BadParameter(UsageError):
|
||||
"""An exception that formats out a standardized error message for a
|
||||
bad parameter. This is useful when thrown from a callback or type as
|
||||
Click will attach contextual information to it (for instance, which
|
||||
parameter it is).
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param param: the parameter object that caused this error. This can
|
||||
be left out, and Click will attach this info itself
|
||||
if possible.
|
||||
:param param_hint: a string that shows up as parameter name. This
|
||||
can be used as an alternative to `param` in cases
|
||||
where custom validation should happen. If it is
|
||||
a string it's used as such, if it's a list then
|
||||
each item is quoted and separated.
|
||||
"""
|
||||
|
||||
def __init__(self, message, ctx=None, param=None, param_hint=None):
|
||||
UsageError.__init__(self, message, ctx)
|
||||
self.param = param
|
||||
self.param_hint = param_hint
|
||||
|
||||
def format_message(self):
|
||||
if self.param_hint is not None:
|
||||
param_hint = self.param_hint
|
||||
elif self.param is not None:
|
||||
param_hint = self.param.get_error_hint(self.ctx)
|
||||
else:
|
||||
return "Invalid value: {}".format(self.message)
|
||||
param_hint = _join_param_hints(param_hint)
|
||||
|
||||
return "Invalid value for {}: {}".format(param_hint, self.message)
|
||||
|
||||
|
||||
class MissingParameter(BadParameter):
|
||||
"""Raised if click required an option or argument but it was not
|
||||
provided when invoking the script.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
|
||||
:param param_type: a string that indicates the type of the parameter.
|
||||
The default is to inherit the parameter type from
|
||||
the given `param`. Valid values are ``'parameter'``,
|
||||
``'option'`` or ``'argument'``.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, message=None, ctx=None, param=None, param_hint=None, param_type=None
|
||||
):
|
||||
BadParameter.__init__(self, message, ctx, param, param_hint)
|
||||
self.param_type = param_type
|
||||
|
||||
def format_message(self):
|
||||
if self.param_hint is not None:
|
||||
param_hint = self.param_hint
|
||||
elif self.param is not None:
|
||||
param_hint = self.param.get_error_hint(self.ctx)
|
||||
else:
|
||||
param_hint = None
|
||||
param_hint = _join_param_hints(param_hint)
|
||||
|
||||
param_type = self.param_type
|
||||
if param_type is None and self.param is not None:
|
||||
param_type = self.param.param_type_name
|
||||
|
||||
msg = self.message
|
||||
if self.param is not None:
|
||||
msg_extra = self.param.type.get_missing_message(self.param)
|
||||
if msg_extra:
|
||||
if msg:
|
||||
msg += ". {}".format(msg_extra)
|
||||
else:
|
||||
msg = msg_extra
|
||||
|
||||
return "Missing {}{}{}{}".format(
|
||||
param_type,
|
||||
" {}".format(param_hint) if param_hint else "",
|
||||
". " if msg else ".",
|
||||
msg or "",
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
if self.message is None:
|
||||
param_name = self.param.name if self.param else None
|
||||
return "missing parameter: {}".format(param_name)
|
||||
else:
|
||||
return self.message
|
||||
|
||||
if PY2:
|
||||
__unicode__ = __str__
|
||||
|
||||
def __str__(self):
|
||||
return self.__unicode__().encode("utf-8")
|
||||
|
||||
|
||||
class NoSuchOption(UsageError):
|
||||
"""Raised if click attempted to handle an option that does not
|
||||
exist.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
"""
|
||||
|
||||
def __init__(self, option_name, message=None, possibilities=None, ctx=None):
|
||||
if message is None:
|
||||
message = "no such option: {}".format(option_name)
|
||||
UsageError.__init__(self, message, ctx)
|
||||
self.option_name = option_name
|
||||
self.possibilities = possibilities
|
||||
|
||||
def format_message(self):
|
||||
bits = [self.message]
|
||||
if self.possibilities:
|
||||
if len(self.possibilities) == 1:
|
||||
bits.append("Did you mean {}?".format(self.possibilities[0]))
|
||||
else:
|
||||
possibilities = sorted(self.possibilities)
|
||||
bits.append("(Possible options: {})".format(", ".join(possibilities)))
|
||||
return " ".join(bits)
|
||||
|
||||
|
||||
class BadOptionUsage(UsageError):
|
||||
"""Raised if an option is generally supplied but the use of the option
|
||||
was incorrect. This is for instance raised if the number of arguments
|
||||
for an option is not correct.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
|
||||
:param option_name: the name of the option being used incorrectly.
|
||||
"""
|
||||
|
||||
def __init__(self, option_name, message, ctx=None):
|
||||
UsageError.__init__(self, message, ctx)
|
||||
self.option_name = option_name
|
||||
|
||||
|
||||
class BadArgumentUsage(UsageError):
|
||||
"""Raised if an argument is generally supplied but the use of the argument
|
||||
was incorrect. This is for instance raised if the number of values
|
||||
for an argument is not correct.
|
||||
|
||||
.. versionadded:: 6.0
|
||||
"""
|
||||
|
||||
def __init__(self, message, ctx=None):
|
||||
UsageError.__init__(self, message, ctx)
|
||||
|
||||
|
||||
class FileError(ClickException):
|
||||
"""Raised if a file cannot be opened."""
|
||||
|
||||
def __init__(self, filename, hint=None):
|
||||
ui_filename = filename_to_ui(filename)
|
||||
if hint is None:
|
||||
hint = "unknown error"
|
||||
ClickException.__init__(self, hint)
|
||||
self.ui_filename = ui_filename
|
||||
self.filename = filename
|
||||
|
||||
def format_message(self):
|
||||
return "Could not open file {}: {}".format(self.ui_filename, self.message)
|
||||
|
||||
|
||||
class Abort(RuntimeError):
|
||||
"""An internal signalling exception that signals Click to abort."""
|
||||
|
||||
|
||||
class Exit(RuntimeError):
|
||||
"""An exception that indicates that the application should exit with some
|
||||
status code.
|
||||
|
||||
:param code: the status code to exit with.
|
||||
"""
|
||||
|
||||
__slots__ = ("exit_code",)
|
||||
|
||||
def __init__(self, code=0):
|
||||
self.exit_code = code
|
283
matteo_env/Lib/site-packages/click/formatting.py
Normal file
|
@ -0,0 +1,283 @@
|
|||
from contextlib import contextmanager
|
||||
|
||||
from ._compat import term_len
|
||||
from .parser import split_opt
|
||||
from .termui import get_terminal_size
|
||||
|
||||
# Can force a width. This is used by the test system
|
||||
FORCED_WIDTH = None
|
||||
|
||||
|
||||
def measure_table(rows):
|
||||
widths = {}
|
||||
for row in rows:
|
||||
for idx, col in enumerate(row):
|
||||
widths[idx] = max(widths.get(idx, 0), term_len(col))
|
||||
return tuple(y for x, y in sorted(widths.items()))
|
||||
|
||||
|
||||
def iter_rows(rows, col_count):
|
||||
for row in rows:
|
||||
row = tuple(row)
|
||||
yield row + ("",) * (col_count - len(row))
|
||||
|
||||
|
||||
def wrap_text(
|
||||
text, width=78, initial_indent="", subsequent_indent="", preserve_paragraphs=False
|
||||
):
|
||||
"""A helper function that intelligently wraps text. By default, it
|
||||
assumes that it operates on a single paragraph of text but if the
|
||||
`preserve_paragraphs` parameter is provided it will intelligently
|
||||
handle paragraphs (defined by two empty lines).
|
||||
|
||||
If paragraphs are handled, a paragraph can be prefixed with an empty
|
||||
line containing the ``\\b`` character (``\\x08``) to indicate that
|
||||
no rewrapping should happen in that block.
|
||||
|
||||
:param text: the text that should be rewrapped.
|
||||
:param width: the maximum width for the text.
|
||||
:param initial_indent: the initial indent that should be placed on the
|
||||
first line as a string.
|
||||
:param subsequent_indent: the indent string that should be placed on
|
||||
each consecutive line.
|
||||
:param preserve_paragraphs: if this flag is set then the wrapping will
|
||||
intelligently handle paragraphs.
|
||||
"""
|
||||
from ._textwrap import TextWrapper
|
||||
|
||||
text = text.expandtabs()
|
||||
wrapper = TextWrapper(
|
||||
width,
|
||||
initial_indent=initial_indent,
|
||||
subsequent_indent=subsequent_indent,
|
||||
replace_whitespace=False,
|
||||
)
|
||||
if not preserve_paragraphs:
|
||||
return wrapper.fill(text)
|
||||
|
||||
p = []
|
||||
buf = []
|
||||
indent = None
|
||||
|
||||
def _flush_par():
|
||||
if not buf:
|
||||
return
|
||||
if buf[0].strip() == "\b":
|
||||
p.append((indent or 0, True, "\n".join(buf[1:])))
|
||||
else:
|
||||
p.append((indent or 0, False, " ".join(buf)))
|
||||
del buf[:]
|
||||
|
||||
for line in text.splitlines():
|
||||
if not line:
|
||||
_flush_par()
|
||||
indent = None
|
||||
else:
|
||||
if indent is None:
|
||||
orig_len = term_len(line)
|
||||
line = line.lstrip()
|
||||
indent = orig_len - term_len(line)
|
||||
buf.append(line)
|
||||
_flush_par()
|
||||
|
||||
rv = []
|
||||
for indent, raw, text in p:
|
||||
with wrapper.extra_indent(" " * indent):
|
||||
if raw:
|
||||
rv.append(wrapper.indent_only(text))
|
||||
else:
|
||||
rv.append(wrapper.fill(text))
|
||||
|
||||
return "\n\n".join(rv)
|
||||
|
||||
|
||||
class HelpFormatter(object):
|
||||
"""This class helps with formatting text-based help pages. It's
|
||||
usually just needed for very special internal cases, but it's also
|
||||
exposed so that developers can write their own fancy outputs.
|
||||
|
||||
At present, it always writes into memory.
|
||||
|
||||
:param indent_increment: the additional increment for each level.
|
||||
:param width: the width for the text. This defaults to the terminal
|
||||
width clamped to a maximum of 78.
|
||||
"""
|
||||
|
||||
def __init__(self, indent_increment=2, width=None, max_width=None):
|
||||
self.indent_increment = indent_increment
|
||||
if max_width is None:
|
||||
max_width = 80
|
||||
if width is None:
|
||||
width = FORCED_WIDTH
|
||||
if width is None:
|
||||
width = max(min(get_terminal_size()[0], max_width) - 2, 50)
|
||||
self.width = width
|
||||
self.current_indent = 0
|
||||
self.buffer = []
|
||||
|
||||
def write(self, string):
|
||||
"""Writes a unicode string into the internal buffer."""
|
||||
self.buffer.append(string)
|
||||
|
||||
def indent(self):
|
||||
"""Increases the indentation."""
|
||||
self.current_indent += self.indent_increment
|
||||
|
||||
def dedent(self):
|
||||
"""Decreases the indentation."""
|
||||
self.current_indent -= self.indent_increment
|
||||
|
||||
def write_usage(self, prog, args="", prefix="Usage: "):
|
||||
"""Writes a usage line into the buffer.
|
||||
|
||||
:param prog: the program name.
|
||||
:param args: whitespace separated list of arguments.
|
||||
:param prefix: the prefix for the first line.
|
||||
"""
|
||||
usage_prefix = "{:>{w}}{} ".format(prefix, prog, w=self.current_indent)
|
||||
text_width = self.width - self.current_indent
|
||||
|
||||
if text_width >= (term_len(usage_prefix) + 20):
|
||||
# The arguments will fit to the right of the prefix.
|
||||
indent = " " * term_len(usage_prefix)
|
||||
self.write(
|
||||
wrap_text(
|
||||
args,
|
||||
text_width,
|
||||
initial_indent=usage_prefix,
|
||||
subsequent_indent=indent,
|
||||
)
|
||||
)
|
||||
else:
|
||||
# The prefix is too long, put the arguments on the next line.
|
||||
self.write(usage_prefix)
|
||||
self.write("\n")
|
||||
indent = " " * (max(self.current_indent, term_len(prefix)) + 4)
|
||||
self.write(
|
||||
wrap_text(
|
||||
args, text_width, initial_indent=indent, subsequent_indent=indent
|
||||
)
|
||||
)
|
||||
|
||||
self.write("\n")
|
||||
|
||||
def write_heading(self, heading):
|
||||
"""Writes a heading into the buffer."""
|
||||
self.write("{:>{w}}{}:\n".format("", heading, w=self.current_indent))
|
||||
|
||||
def write_paragraph(self):
|
||||
"""Writes a paragraph into the buffer."""
|
||||
if self.buffer:
|
||||
self.write("\n")
|
||||
|
||||
def write_text(self, text):
|
||||
"""Writes re-indented text into the buffer. This rewraps and
|
||||
preserves paragraphs.
|
||||
"""
|
||||
text_width = max(self.width - self.current_indent, 11)
|
||||
indent = " " * self.current_indent
|
||||
self.write(
|
||||
wrap_text(
|
||||
text,
|
||||
text_width,
|
||||
initial_indent=indent,
|
||||
subsequent_indent=indent,
|
||||
preserve_paragraphs=True,
|
||||
)
|
||||
)
|
||||
self.write("\n")
|
||||
|
||||
def write_dl(self, rows, col_max=30, col_spacing=2):
|
||||
"""Writes a definition list into the buffer. This is how options
|
||||
and commands are usually formatted.
|
||||
|
||||
:param rows: a list of two item tuples for the terms and values.
|
||||
:param col_max: the maximum width of the first column.
|
||||
:param col_spacing: the number of spaces between the first and
|
||||
second column.
|
||||
"""
|
||||
rows = list(rows)
|
||||
widths = measure_table(rows)
|
||||
if len(widths) != 2:
|
||||
raise TypeError("Expected two columns for definition list")
|
||||
|
||||
first_col = min(widths[0], col_max) + col_spacing
|
||||
|
||||
for first, second in iter_rows(rows, len(widths)):
|
||||
self.write("{:>{w}}{}".format("", first, w=self.current_indent))
|
||||
if not second:
|
||||
self.write("\n")
|
||||
continue
|
||||
if term_len(first) <= first_col - col_spacing:
|
||||
self.write(" " * (first_col - term_len(first)))
|
||||
else:
|
||||
self.write("\n")
|
||||
self.write(" " * (first_col + self.current_indent))
|
||||
|
||||
text_width = max(self.width - first_col - 2, 10)
|
||||
wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True)
|
||||
lines = wrapped_text.splitlines()
|
||||
|
||||
if lines:
|
||||
self.write("{}\n".format(lines[0]))
|
||||
|
||||
for line in lines[1:]:
|
||||
self.write(
|
||||
"{:>{w}}{}\n".format(
|
||||
"", line, w=first_col + self.current_indent
|
||||
)
|
||||
)
|
||||
|
||||
if len(lines) > 1:
|
||||
# separate long help from next option
|
||||
self.write("\n")
|
||||
else:
|
||||
self.write("\n")
|
||||
|
||||
@contextmanager
|
||||
def section(self, name):
|
||||
"""Helpful context manager that writes a paragraph, a heading,
|
||||
and the indents.
|
||||
|
||||
:param name: the section name that is written as heading.
|
||||
"""
|
||||
self.write_paragraph()
|
||||
self.write_heading(name)
|
||||
self.indent()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.dedent()
|
||||
|
||||
@contextmanager
|
||||
def indentation(self):
|
||||
"""A context manager that increases the indentation."""
|
||||
self.indent()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.dedent()
|
||||
|
||||
def getvalue(self):
|
||||
"""Returns the buffer contents."""
|
||||
return "".join(self.buffer)
|
||||
|
||||
|
||||
def join_options(options):
|
||||
"""Given a list of option strings this joins them in the most appropriate
|
||||
way and returns them in the form ``(formatted_string,
|
||||
any_prefix_is_slash)`` where the second item in the tuple is a flag that
|
||||
indicates if any of the option prefixes was a slash.
|
||||
"""
|
||||
rv = []
|
||||
any_prefix_is_slash = False
|
||||
for opt in options:
|
||||
prefix = split_opt(opt)[0]
|
||||
if prefix == "/":
|
||||
any_prefix_is_slash = True
|
||||
rv.append((len(prefix), opt))
|
||||
|
||||
rv.sort(key=lambda x: x[0])
|
||||
|
||||
rv = ", ".join(x[1] for x in rv)
|
||||
return rv, any_prefix_is_slash
|
47
matteo_env/Lib/site-packages/click/globals.py
Normal file
|
@ -0,0 +1,47 @@
|
|||
from threading import local
|
||||
|
||||
_local = local()
|
||||
|
||||
|
||||
def get_current_context(silent=False):
|
||||
"""Returns the current click context. This can be used as a way to
|
||||
access the current context object from anywhere. This is a more implicit
|
||||
alternative to the :func:`pass_context` decorator. This function is
|
||||
primarily useful for helpers such as :func:`echo` which might be
|
||||
interested in changing its behavior based on the current context.
|
||||
|
||||
To push the current context, :meth:`Context.scope` can be used.
|
||||
|
||||
.. versionadded:: 5.0
|
||||
|
||||
:param silent: if set to `True` the return value is `None` if no context
|
||||
is available. The default behavior is to raise a
|
||||
:exc:`RuntimeError`.
|
||||
"""
|
||||
try:
|
||||
return _local.stack[-1]
|
||||
except (AttributeError, IndexError):
|
||||
if not silent:
|
||||
raise RuntimeError("There is no active click context.")
|
||||
|
||||
|
||||
def push_context(ctx):
|
||||
"""Pushes a new context to the current stack."""
|
||||
_local.__dict__.setdefault("stack", []).append(ctx)
|
||||
|
||||
|
||||
def pop_context():
|
||||
"""Removes the top level from the stack."""
|
||||
_local.stack.pop()
|
||||
|
||||
|
||||
def resolve_color_default(color=None):
|
||||
""""Internal helper to get the default value of the color flag. If a
|
||||
value is passed it's returned unchanged, otherwise it's looked up from
|
||||
the current context.
|
||||
"""
|
||||
if color is not None:
|
||||
return color
|
||||
ctx = get_current_context(silent=True)
|
||||
if ctx is not None:
|
||||
return ctx.color
|
428
matteo_env/Lib/site-packages/click/parser.py
Normal file
|
@ -0,0 +1,428 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
This module started out as largely a copy paste from the stdlib's
|
||||
optparse module with the features removed that we do not need from
|
||||
optparse because we implement them in Click on a higher level (for
|
||||
instance type handling, help formatting and a lot more).
|
||||
|
||||
The plan is to remove more and more from here over time.
|
||||
|
||||
The reason this is a different module and not optparse from the stdlib
|
||||
is that there are differences in 2.x and 3.x about the error messages
|
||||
generated and optparse in the stdlib uses gettext for no good reason
|
||||
and might cause us issues.
|
||||
|
||||
Click uses parts of optparse written by Gregory P. Ward and maintained
|
||||
by the Python Software Foundation. This is limited to code in parser.py.
|
||||
|
||||
Copyright 2001-2006 Gregory P. Ward. All rights reserved.
|
||||
Copyright 2002-2006 Python Software Foundation. All rights reserved.
|
||||
"""
|
||||
import re
|
||||
from collections import deque
|
||||
|
||||
from .exceptions import BadArgumentUsage
|
||||
from .exceptions import BadOptionUsage
|
||||
from .exceptions import NoSuchOption
|
||||
from .exceptions import UsageError
|
||||
|
||||
|
||||
def _unpack_args(args, nargs_spec):
|
||||
"""Given an iterable of arguments and an iterable of nargs specifications,
|
||||
it returns a tuple with all the unpacked arguments at the first index
|
||||
and all remaining arguments as the second.
|
||||
|
||||
The nargs specification is the number of arguments that should be consumed
|
||||
or `-1` to indicate that this position should eat up all the remainders.
|
||||
|
||||
Missing items are filled with `None`.
|
||||
"""
|
||||
args = deque(args)
|
||||
nargs_spec = deque(nargs_spec)
|
||||
rv = []
|
||||
spos = None
|
||||
|
||||
def _fetch(c):
|
||||
try:
|
||||
if spos is None:
|
||||
return c.popleft()
|
||||
else:
|
||||
return c.pop()
|
||||
except IndexError:
|
||||
return None
|
||||
|
||||
while nargs_spec:
|
||||
nargs = _fetch(nargs_spec)
|
||||
if nargs == 1:
|
||||
rv.append(_fetch(args))
|
||||
elif nargs > 1:
|
||||
x = [_fetch(args) for _ in range(nargs)]
|
||||
# If we're reversed, we're pulling in the arguments in reverse,
|
||||
# so we need to turn them around.
|
||||
if spos is not None:
|
||||
x.reverse()
|
||||
rv.append(tuple(x))
|
||||
elif nargs < 0:
|
||||
if spos is not None:
|
||||
raise TypeError("Cannot have two nargs < 0")
|
||||
spos = len(rv)
|
||||
rv.append(None)
|
||||
|
||||
# spos is the position of the wildcard (star). If it's not `None`,
|
||||
# we fill it with the remainder.
|
||||
if spos is not None:
|
||||
rv[spos] = tuple(args)
|
||||
args = []
|
||||
rv[spos + 1 :] = reversed(rv[spos + 1 :])
|
||||
|
||||
return tuple(rv), list(args)
|
||||
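# Illustrative sketch, not part of the vendored click source, evaluated against
# the _unpack_args() defined above: a spec entry of 1 takes one argument and -1
# greedily takes the rest (allowed once).
unpacked, leftover = _unpack_args(["a", "b", "c", "d"], [1, -1, 1])
assert unpacked == ("a", ("b", "c"), "d") and leftover == []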
|
||||
|
||||
def _error_opt_args(nargs, opt):
|
||||
if nargs == 1:
|
||||
raise BadOptionUsage(opt, "{} option requires an argument".format(opt))
|
||||
raise BadOptionUsage(opt, "{} option requires {} arguments".format(opt, nargs))
|
||||
|
||||
|
||||
def split_opt(opt):
|
||||
first = opt[:1]
|
||||
if first.isalnum():
|
||||
return "", opt
|
||||
if opt[1:2] == first:
|
||||
return opt[:2], opt[2:]
|
||||
return first, opt[1:]
|
||||
|
||||
|
||||
def normalize_opt(opt, ctx):
|
||||
if ctx is None or ctx.token_normalize_func is None:
|
||||
return opt
|
||||
prefix, opt = split_opt(opt)
|
||||
return prefix + ctx.token_normalize_func(opt)
|
||||
|
||||
|
||||
def split_arg_string(string):
|
||||
"""Given an argument string this attempts to split it into small parts."""
|
||||
rv = []
|
||||
for match in re.finditer(
|
||||
r"('([^'\\]*(?:\\.[^'\\]*)*)'|\"([^\"\\]*(?:\\.[^\"\\]*)*)\"|\S+)\s*",
|
||||
string,
|
||||
re.S,
|
||||
):
|
||||
arg = match.group().strip()
|
||||
if arg[:1] == arg[-1:] and arg[:1] in "\"'":
|
||||
arg = arg[1:-1].encode("ascii", "backslashreplace").decode("unicode-escape")
|
||||
try:
|
||||
arg = type(string)(arg)
|
||||
except UnicodeError:
|
||||
pass
|
||||
rv.append(arg)
|
||||
return rv
|
||||
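# Illustrative sketch, not part of the vendored click source: split_arg_string()
# (defined above) is a small shlex-like splitter; quotes group words and are
# stripped from the result.
assert split_arg_string("run --name 'bat dad'") == ["run", "--name", "bat dad"]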
|
||||
|
||||
class Option(object):
|
||||
def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None):
|
||||
self._short_opts = []
|
||||
self._long_opts = []
|
||||
self.prefixes = set()
|
||||
|
||||
for opt in opts:
|
||||
prefix, value = split_opt(opt)
|
||||
if not prefix:
|
||||
raise ValueError("Invalid start character for option ({})".format(opt))
|
||||
self.prefixes.add(prefix[0])
|
||||
if len(prefix) == 1 and len(value) == 1:
|
||||
self._short_opts.append(opt)
|
||||
else:
|
||||
self._long_opts.append(opt)
|
||||
self.prefixes.add(prefix)
|
||||
|
||||
if action is None:
|
||||
action = "store"
|
||||
|
||||
self.dest = dest
|
||||
self.action = action
|
||||
self.nargs = nargs
|
||||
self.const = const
|
||||
self.obj = obj
|
||||
|
||||
@property
|
||||
def takes_value(self):
|
||||
return self.action in ("store", "append")
|
||||
|
||||
def process(self, value, state):
|
||||
if self.action == "store":
|
||||
state.opts[self.dest] = value
|
||||
elif self.action == "store_const":
|
||||
state.opts[self.dest] = self.const
|
||||
elif self.action == "append":
|
||||
state.opts.setdefault(self.dest, []).append(value)
|
||||
elif self.action == "append_const":
|
||||
state.opts.setdefault(self.dest, []).append(self.const)
|
||||
elif self.action == "count":
|
||||
state.opts[self.dest] = state.opts.get(self.dest, 0) + 1
|
||||
else:
|
||||
raise ValueError("unknown action '{}'".format(self.action))
|
||||
state.order.append(self.obj)
|
||||
|
||||
|
||||
class Argument(object):
|
||||
def __init__(self, dest, nargs=1, obj=None):
|
||||
self.dest = dest
|
||||
self.nargs = nargs
|
||||
self.obj = obj
|
||||
|
||||
def process(self, value, state):
|
||||
if self.nargs > 1:
|
||||
holes = sum(1 for x in value if x is None)
|
||||
if holes == len(value):
|
||||
value = None
|
||||
elif holes != 0:
|
||||
raise BadArgumentUsage(
|
||||
"argument {} takes {} values".format(self.dest, self.nargs)
|
||||
)
|
||||
state.opts[self.dest] = value
|
||||
state.order.append(self.obj)
|
||||
|
||||
|
||||
class ParsingState(object):
|
||||
def __init__(self, rargs):
|
||||
self.opts = {}
|
||||
self.largs = []
|
||||
self.rargs = rargs
|
||||
self.order = []
|
||||
|
||||
|
||||
class OptionParser(object):
|
||||
"""The option parser is an internal class that is ultimately used to
|
||||
parse options and arguments. It's modelled after optparse and brings
|
||||
a similar but vastly simplified API. It should generally not be used
|
||||
directly as the high level Click classes wrap it for you.
|
||||
|
||||
It's not nearly as extensible as optparse or argparse as it does not
|
||||
implement features that are implemented on a higher level (such as
|
||||
types or defaults).
|
||||
|
||||
:param ctx: optionally the :class:`~click.Context` that this parser
|
||||
should be associated with.
|
||||
"""
|
||||
|
||||
def __init__(self, ctx=None):
|
||||
#: The :class:`~click.Context` for this parser. This might be
|
||||
#: `None` for some advanced use cases.
|
||||
self.ctx = ctx
|
||||
#: This controls how the parser deals with interspersed arguments.
|
||||
#: If this is set to `False`, the parser will stop on the first
|
||||
#: non-option. Click uses this to implement nested subcommands
|
||||
#: safely.
|
||||
self.allow_interspersed_args = True
|
||||
#: This tells the parser how to deal with unknown options. By
|
||||
#: default it will error out (which is sensible), but there is a
|
||||
#: second mode where it will ignore it and continue processing
|
||||
#: after shifting all the unknown options into the resulting args.
|
||||
self.ignore_unknown_options = False
|
||||
if ctx is not None:
|
||||
self.allow_interspersed_args = ctx.allow_interspersed_args
|
||||
self.ignore_unknown_options = ctx.ignore_unknown_options
|
||||
self._short_opt = {}
|
||||
self._long_opt = {}
|
||||
self._opt_prefixes = {"-", "--"}
|
||||
self._args = []
|
||||
|
||||
def add_option(self, opts, dest, action=None, nargs=1, const=None, obj=None):
|
||||
"""Adds a new option named `dest` to the parser. The destination
|
||||
is not inferred (unlike with optparse) and needs to be explicitly
|
||||
provided. Action can be any of ``store``, ``store_const``,
|
||||
``append``, ``append_const`` or ``count``.
|
||||
|
||||
The `obj` can be used to identify the option in the order list
|
||||
that is returned from the parser.
|
||||
"""
|
||||
if obj is None:
|
||||
obj = dest
|
||||
opts = [normalize_opt(opt, self.ctx) for opt in opts]
|
||||
option = Option(opts, dest, action=action, nargs=nargs, const=const, obj=obj)
|
||||
self._opt_prefixes.update(option.prefixes)
|
||||
for opt in option._short_opts:
|
||||
self._short_opt[opt] = option
|
||||
for opt in option._long_opts:
|
||||
self._long_opt[opt] = option
|
||||
|
||||
def add_argument(self, dest, nargs=1, obj=None):
|
||||
"""Adds a positional argument named `dest` to the parser.
|
||||
|
||||
The `obj` can be used to identify the option in the order list
|
||||
that is returned from the parser.
|
||||
"""
|
||||
if obj is None:
|
||||
obj = dest
|
||||
self._args.append(Argument(dest=dest, nargs=nargs, obj=obj))
|
||||
|
||||
def parse_args(self, args):
|
||||
"""Parses positional arguments and returns ``(values, args, order)``
|
||||
for the parsed options and arguments as well as the leftover
|
||||
arguments if there are any. The order is a list of objects as they
|
||||
appear on the command line. If arguments appear multiple times they
|
||||
will be memorized multiple times as well.
|
||||
"""
|
||||
state = ParsingState(args)
|
||||
try:
|
||||
self._process_args_for_options(state)
|
||||
self._process_args_for_args(state)
|
||||
except UsageError:
|
||||
if self.ctx is None or not self.ctx.resilient_parsing:
|
||||
raise
|
||||
return state.opts, state.largs, state.order
|
||||
|
||||
def _process_args_for_args(self, state):
|
||||
pargs, args = _unpack_args(
|
||||
state.largs + state.rargs, [x.nargs for x in self._args]
|
||||
)
|
||||
|
||||
for idx, arg in enumerate(self._args):
|
||||
arg.process(pargs[idx], state)
|
||||
|
||||
state.largs = args
|
||||
state.rargs = []
|
||||
|
||||
def _process_args_for_options(self, state):
|
||||
while state.rargs:
|
||||
arg = state.rargs.pop(0)
|
||||
arglen = len(arg)
|
||||
# Double dashes always handled explicitly regardless of what
|
||||
# prefixes are valid.
|
||||
if arg == "--":
|
||||
return
|
||||
elif arg[:1] in self._opt_prefixes and arglen > 1:
|
||||
self._process_opts(arg, state)
|
||||
elif self.allow_interspersed_args:
|
||||
state.largs.append(arg)
|
||||
else:
|
||||
state.rargs.insert(0, arg)
|
||||
return
|
||||
|
||||
# Say this is the original argument list:
|
||||
# [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
|
||||
# ^
|
||||
# (we are about to process arg(i)).
|
||||
#
|
||||
# Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
|
||||
# [arg0, ..., arg(i-1)] (any options and their arguments will have
|
||||
# been removed from largs).
|
||||
#
|
||||
# The while loop will usually consume 1 or more arguments per pass.
|
||||
# If it consumes 1 (eg. arg is an option that takes no arguments),
|
||||
# then after _process_arg() is done the situation is:
|
||||
#
|
||||
# largs = subset of [arg0, ..., arg(i)]
|
||||
# rargs = [arg(i+1), ..., arg(N-1)]
|
||||
#
|
||||
# If allow_interspersed_args is false, largs will always be
|
||||
# *empty* -- still a subset of [arg0, ..., arg(i-1)], but
|
||||
# not a very interesting subset!
|
||||
|
||||
def _match_long_opt(self, opt, explicit_value, state):
|
||||
if opt not in self._long_opt:
|
||||
possibilities = [word for word in self._long_opt if word.startswith(opt)]
|
||||
raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
|
||||
|
||||
option = self._long_opt[opt]
|
||||
if option.takes_value:
|
||||
# At this point it's safe to modify rargs by injecting the
|
||||
# explicit value, because no exception is raised in this
|
||||
# branch. This means that the inserted value will be fully
|
||||
# consumed.
|
||||
if explicit_value is not None:
|
||||
state.rargs.insert(0, explicit_value)
|
||||
|
||||
nargs = option.nargs
|
||||
if len(state.rargs) < nargs:
|
||||
_error_opt_args(nargs, opt)
|
||||
elif nargs == 1:
|
||||
value = state.rargs.pop(0)
|
||||
else:
|
||||
value = tuple(state.rargs[:nargs])
|
||||
del state.rargs[:nargs]
|
||||
|
||||
elif explicit_value is not None:
|
||||
raise BadOptionUsage(opt, "{} option does not take a value".format(opt))
|
||||
|
||||
else:
|
||||
value = None
|
||||
|
||||
option.process(value, state)
|
||||
|
||||
def _match_short_opt(self, arg, state):
|
||||
stop = False
|
||||
i = 1
|
||||
prefix = arg[0]
|
||||
unknown_options = []
|
||||
|
||||
for ch in arg[1:]:
|
||||
opt = normalize_opt(prefix + ch, self.ctx)
|
||||
option = self._short_opt.get(opt)
|
||||
i += 1
|
||||
|
||||
if not option:
|
||||
if self.ignore_unknown_options:
|
||||
unknown_options.append(ch)
|
||||
continue
|
||||
raise NoSuchOption(opt, ctx=self.ctx)
|
||||
if option.takes_value:
|
||||
# Any characters left in arg? Pretend they're the
|
||||
# next arg, and stop consuming characters of arg.
|
||||
if i < len(arg):
|
||||
state.rargs.insert(0, arg[i:])
|
||||
stop = True
|
||||
|
||||
nargs = option.nargs
|
||||
if len(state.rargs) < nargs:
|
||||
_error_opt_args(nargs, opt)
|
||||
elif nargs == 1:
|
||||
value = state.rargs.pop(0)
|
||||
else:
|
||||
value = tuple(state.rargs[:nargs])
|
||||
del state.rargs[:nargs]
|
||||
|
||||
else:
|
||||
value = None
|
||||
|
||||
option.process(value, state)
|
||||
|
||||
if stop:
|
||||
break
|
||||
|
||||
# If we got any unknown options, we recombine the string of the
|
||||
# remaining options and re-attach the prefix, then report that
|
||||
# to the state as new larg. This way there is basic combinatorics
|
||||
# that can be achieved while still ignoring unknown arguments.
|
||||
if self.ignore_unknown_options and unknown_options:
|
||||
state.largs.append("{}{}".format(prefix, "".join(unknown_options)))
|
||||
|
||||
def _process_opts(self, arg, state):
|
||||
explicit_value = None
|
||||
# Long option handling happens in two parts. The first part is
|
||||
# supporting explicitly attached values. In any case, we will try
|
||||
# to long match the option first.
|
||||
if "=" in arg:
|
||||
long_opt, explicit_value = arg.split("=", 1)
|
||||
else:
|
||||
long_opt = arg
|
||||
norm_long_opt = normalize_opt(long_opt, self.ctx)
|
||||
|
||||
# At this point we will match the (assumed) long option through
|
||||
# the long option matching code. Note that this allows options
|
||||
# like "-foo" to be matched as long options.
|
||||
try:
|
||||
self._match_long_opt(norm_long_opt, explicit_value, state)
|
||||
except NoSuchOption:
|
||||
# At this point the long option matching failed, and we need
|
||||
# to try with short options. However there is a special rule
|
||||
# which says that if we have a two-character option prefix
|
||||
# (applies to "--foo" for instance), we do not dispatch to the
|
||||
# short option code and will instead raise the no option
|
||||
# error.
|
||||
if arg[:2] not in self._opt_prefixes:
|
||||
return self._match_short_opt(arg, state)
|
||||
if not self.ignore_unknown_options:
|
||||
raise
|
||||
state.largs.append(arg)
|
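# Illustrative sketch, not part of the vendored click source: a minimal round
# trip through the internal OptionParser shown above (normally the higher-level
# Command/Option classes drive it). Assumes the vendored click package is importable.
from click.parser import OptionParser

parser = OptionParser()
parser.add_option(["--count"], dest="count")                        # store, nargs=1
parser.add_option(["--verbose", "-v"], dest="verbose",
                  action="store_const", const=True)                 # flag-style
parser.add_argument(dest="src")                                     # positional

opts, leftovers, order = parser.parse_args(["--count", "3", "-v", "input.txt"])
assert opts == {"count": "3", "verbose": True, "src": "input.txt"}
assert leftovers == []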
681
matteo_env/Lib/site-packages/click/termui.py
Normal file
@@ -0,0 +1,681 @@
import inspect
|
||||
import io
|
||||
import itertools
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
|
||||
from ._compat import DEFAULT_COLUMNS
|
||||
from ._compat import get_winterm_size
|
||||
from ._compat import isatty
|
||||
from ._compat import raw_input
|
||||
from ._compat import string_types
|
||||
from ._compat import strip_ansi
|
||||
from ._compat import text_type
|
||||
from ._compat import WIN
|
||||
from .exceptions import Abort
|
||||
from .exceptions import UsageError
|
||||
from .globals import resolve_color_default
|
||||
from .types import Choice
|
||||
from .types import convert_type
|
||||
from .types import Path
|
||||
from .utils import echo
|
||||
from .utils import LazyFile
|
||||
|
||||
# The prompt functions to use. The doc tools currently override these
|
||||
# functions to customize how they work.
|
||||
visible_prompt_func = raw_input
|
||||
|
||||
_ansi_colors = {
|
||||
"black": 30,
|
||||
"red": 31,
|
||||
"green": 32,
|
||||
"yellow": 33,
|
||||
"blue": 34,
|
||||
"magenta": 35,
|
||||
"cyan": 36,
|
||||
"white": 37,
|
||||
"reset": 39,
|
||||
"bright_black": 90,
|
||||
"bright_red": 91,
|
||||
"bright_green": 92,
|
||||
"bright_yellow": 93,
|
||||
"bright_blue": 94,
|
||||
"bright_magenta": 95,
|
||||
"bright_cyan": 96,
|
||||
"bright_white": 97,
|
||||
}
|
||||
_ansi_reset_all = "\033[0m"
|
||||
|
||||
|
||||
def hidden_prompt_func(prompt):
|
||||
import getpass
|
||||
|
||||
return getpass.getpass(prompt)
|
||||
|
||||
|
||||
def _build_prompt(
|
||||
text, suffix, show_default=False, default=None, show_choices=True, type=None
|
||||
):
|
||||
prompt = text
|
||||
if type is not None and show_choices and isinstance(type, Choice):
|
||||
prompt += " ({})".format(", ".join(map(str, type.choices)))
|
||||
if default is not None and show_default:
|
||||
prompt = "{} [{}]".format(prompt, _format_default(default))
|
||||
return prompt + suffix
|
||||
|
||||
|
||||
def _format_default(default):
|
||||
if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"):
|
||||
return default.name
|
||||
|
||||
return default
|
||||
|
||||
|
||||
def prompt(
|
||||
text,
|
||||
default=None,
|
||||
hide_input=False,
|
||||
confirmation_prompt=False,
|
||||
type=None,
|
||||
value_proc=None,
|
||||
prompt_suffix=": ",
|
||||
show_default=True,
|
||||
err=False,
|
||||
show_choices=True,
|
||||
):
|
||||
"""Prompts a user for input. This is a convenience function that can
|
||||
be used to prompt a user for input later.
|
||||
|
||||
If the user aborts the input by sending an interrupt signal, this
|
||||
function will catch it and raise a :exc:`Abort` exception.
|
||||
|
||||
.. versionadded:: 7.0
|
||||
Added the show_choices parameter.
|
||||
|
||||
.. versionadded:: 6.0
|
||||
Added unicode support for cmd.exe on Windows.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `err` parameter.
|
||||
|
||||
:param text: the text to show for the prompt.
|
||||
:param default: the default value to use if no input happens. If this
|
||||
is not given it will prompt until it's aborted.
|
||||
:param hide_input: if this is set to true then the input value will
|
||||
be hidden.
|
||||
:param confirmation_prompt: asks for confirmation for the value.
|
||||
:param type: the type to use to check the value against.
|
||||
:param value_proc: if this parameter is provided it's a function that
|
||||
is invoked instead of the type conversion to
|
||||
convert a value.
|
||||
:param prompt_suffix: a suffix that should be added to the prompt.
|
||||
:param show_default: shows or hides the default value in the prompt.
|
||||
:param err: if set to true the file defaults to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
:param show_choices: Show or hide choices if the passed type is a Choice.
|
||||
For example if type is a Choice of either day or week,
|
||||
show_choices is true and text is "Group by" then the
|
||||
prompt will be "Group by (day, week): ".
|
||||
"""
|
||||
result = None
|
||||
|
||||
def prompt_func(text):
|
||||
f = hidden_prompt_func if hide_input else visible_prompt_func
|
||||
try:
|
||||
# Write the prompt separately so that we get nice
|
||||
# coloring through colorama on Windows
|
||||
echo(text, nl=False, err=err)
|
||||
return f("")
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
# getpass doesn't print a newline if the user aborts input with ^C.
|
||||
# Allegedly this behavior is inherited from getpass(3).
|
||||
# A doc bug has been filed at https://bugs.python.org/issue24711
|
||||
if hide_input:
|
||||
echo(None, err=err)
|
||||
raise Abort()
|
||||
|
||||
if value_proc is None:
|
||||
value_proc = convert_type(type, default)
|
||||
|
||||
prompt = _build_prompt(
|
||||
text, prompt_suffix, show_default, default, show_choices, type
|
||||
)
|
||||
|
||||
while 1:
|
||||
while 1:
|
||||
value = prompt_func(prompt)
|
||||
if value:
|
||||
break
|
||||
elif default is not None:
|
||||
if isinstance(value_proc, Path):
|
||||
# validate Path default value (exists, dir_okay, etc.)
|
||||
value = default
|
||||
break
|
||||
return default
|
||||
try:
|
||||
result = value_proc(value)
|
||||
except UsageError as e:
|
||||
echo("Error: {}".format(e.message), err=err) # noqa: B306
|
||||
continue
|
||||
if not confirmation_prompt:
|
||||
return result
|
||||
while 1:
|
||||
value2 = prompt_func("Repeat for confirmation: ")
|
||||
if value2:
|
||||
break
|
||||
if value == value2:
|
||||
return result
|
||||
echo("Error: the two entered values do not match", err=err)
|
||||
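# Illustrative usage sketch, not part of the vendored click source: prompt()
# re-asks until the value converts cleanly with the given type; with
# confirmation_prompt=True the user must enter the value twice.
import click

age = click.prompt("Age", type=int, default=18)             # shows "[18]"
token = click.prompt("Token", hide_input=True, confirmation_prompt=True)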
|
||||
|
||||
def confirm(
|
||||
text, default=False, abort=False, prompt_suffix=": ", show_default=True, err=False
|
||||
):
|
||||
"""Prompts for confirmation (yes/no question).
|
||||
|
||||
If the user aborts the input by sending an interrupt signal, this
|
||||
function will catch it and raise a :exc:`Abort` exception.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `err` parameter.
|
||||
|
||||
:param text: the question to ask.
|
||||
:param default: the default for the prompt.
|
||||
:param abort: if this is set to `True` a negative answer aborts the
|
||||
execution by raising :exc:`Abort`.
|
||||
:param prompt_suffix: a suffix that should be added to the prompt.
|
||||
:param show_default: shows or hides the default value in the prompt.
|
||||
:param err: if set to true the file defaults to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
"""
|
||||
prompt = _build_prompt(
|
||||
text, prompt_suffix, show_default, "Y/n" if default else "y/N"
|
||||
)
|
||||
while 1:
|
||||
try:
|
||||
# Write the prompt separately so that we get nice
|
||||
# coloring through colorama on Windows
|
||||
echo(prompt, nl=False, err=err)
|
||||
value = visible_prompt_func("").lower().strip()
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
raise Abort()
|
||||
if value in ("y", "yes"):
|
||||
rv = True
|
||||
elif value in ("n", "no"):
|
||||
rv = False
|
||||
elif value == "":
|
||||
rv = default
|
||||
else:
|
||||
echo("Error: invalid input", err=err)
|
||||
continue
|
||||
break
|
||||
if abort and not rv:
|
||||
raise Abort()
|
||||
return rv
|
||||
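# Illustrative usage sketch, not part of the vendored click source: confirm()
# with abort=True raises Abort (a clean exit) on a "no" answer, so the happy
# path needs no explicit else branch.
import click

click.confirm("Delete all generated files?", abort=True)
click.echo("deleting...")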
|
||||
|
||||
def get_terminal_size():
|
||||
"""Returns the current size of the terminal as a tuple in the form
|
||||
``(width, height)`` in columns and rows.
|
||||
"""
|
||||
# If shutil has get_terminal_size() (Python 3.3 and later) use that
|
||||
if sys.version_info >= (3, 3):
|
||||
import shutil
|
||||
|
||||
shutil_get_terminal_size = getattr(shutil, "get_terminal_size", None)
|
||||
if shutil_get_terminal_size:
|
||||
sz = shutil_get_terminal_size()
|
||||
return sz.columns, sz.lines
|
||||
|
||||
# We provide a sensible default for get_winterm_size() when being invoked
|
||||
# inside a subprocess. Without this, it would not provide a useful input.
|
||||
if get_winterm_size is not None:
|
||||
size = get_winterm_size()
|
||||
if size == (0, 0):
|
||||
return (79, 24)
|
||||
else:
|
||||
return size
|
||||
|
||||
def ioctl_gwinsz(fd):
|
||||
try:
|
||||
import fcntl
|
||||
import termios
|
||||
|
||||
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
|
||||
except Exception:
|
||||
return
|
||||
return cr
|
||||
|
||||
cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
|
||||
if not cr:
|
||||
try:
|
||||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
try:
|
||||
cr = ioctl_gwinsz(fd)
|
||||
finally:
|
||||
os.close(fd)
|
||||
except Exception:
|
||||
pass
|
||||
if not cr or not cr[0] or not cr[1]:
|
||||
cr = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", DEFAULT_COLUMNS))
|
||||
return int(cr[1]), int(cr[0])
|
||||
|
||||
|
||||
def echo_via_pager(text_or_generator, color=None):
|
||||
"""This function takes a text and shows it via an environment specific
|
||||
pager on stdout.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Added the `color` flag.
|
||||
|
||||
:param text_or_generator: the text to page, or alternatively, a
|
||||
generator emitting the text to page.
|
||||
:param color: controls if the pager supports ANSI colors or not. The
|
||||
default is autodetection.
|
||||
"""
|
||||
color = resolve_color_default(color)
|
||||
|
||||
if inspect.isgeneratorfunction(text_or_generator):
|
||||
i = text_or_generator()
|
||||
elif isinstance(text_or_generator, string_types):
|
||||
i = [text_or_generator]
|
||||
else:
|
||||
i = iter(text_or_generator)
|
||||
|
||||
# convert every element of i to a text type if necessary
|
||||
text_generator = (el if isinstance(el, string_types) else text_type(el) for el in i)
|
||||
|
||||
from ._termui_impl import pager
|
||||
|
||||
return pager(itertools.chain(text_generator, "\n"), color)
|
||||
|
||||
|
||||
def progressbar(
|
||||
iterable=None,
|
||||
length=None,
|
||||
label=None,
|
||||
show_eta=True,
|
||||
show_percent=None,
|
||||
show_pos=False,
|
||||
item_show_func=None,
|
||||
fill_char="#",
|
||||
empty_char="-",
|
||||
bar_template="%(label)s [%(bar)s] %(info)s",
|
||||
info_sep=" ",
|
||||
width=36,
|
||||
file=None,
|
||||
color=None,
|
||||
):
|
||||
"""This function creates an iterable context manager that can be used
|
||||
to iterate over something while showing a progress bar. It will
|
||||
either iterate over the `iterable` or `length` items (that are counted
|
||||
up). While iteration happens, this function will print a rendered
|
||||
progress bar to the given `file` (defaults to stdout) and will attempt
|
||||
to calculate remaining time and more. By default, this progress bar
|
||||
will not be rendered if the file is not a terminal.
|
||||
|
||||
The context manager creates the progress bar. When the context
|
||||
manager is entered the progress bar is already created. With every
|
||||
iteration over the progress bar, the iterable passed to the bar is
|
||||
advanced and the bar is updated. When the context manager exits,
|
||||
a newline is printed and the progress bar is finalized on screen.
|
||||
|
||||
Note: The progress bar is currently designed for use cases where the
|
||||
total progress can be expected to take at least several seconds.
|
||||
Because of this, the ProgressBar class object won't display
|
||||
progress that is considered too fast, and progress where the time
|
||||
between steps is less than a second.
|
||||
|
||||
No printing must happen or the progress bar will be unintentionally
|
||||
destroyed.
|
||||
|
||||
Example usage::
|
||||
|
||||
with progressbar(items) as bar:
|
||||
for item in bar:
|
||||
do_something_with(item)
|
||||
|
||||
Alternatively, if no iterable is specified, one can manually update the
|
||||
progress bar through the `update()` method instead of directly
|
||||
iterating over the progress bar. The update method accepts the number
|
||||
of steps to increment the bar with::
|
||||
|
||||
with progressbar(length=chunks.total_bytes) as bar:
|
||||
for chunk in chunks:
|
||||
process_chunk(chunk)
|
||||
bar.update(chunks.bytes)
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `color` parameter. Added a `update` method to the
|
||||
progressbar object.
|
||||
|
||||
:param iterable: an iterable to iterate over. If not provided the length
|
||||
is required.
|
||||
:param length: the number of items to iterate over. By default the
|
||||
progressbar will attempt to ask the iterator about its
|
||||
length, which might or might not work. If an iterable is
|
||||
also provided this parameter can be used to override the
|
||||
length. If an iterable is not provided the progress bar
|
||||
will iterate over a range of that length.
|
||||
:param label: the label to show next to the progress bar.
|
||||
:param show_eta: enables or disables the estimated time display. This is
|
||||
automatically disabled if the length cannot be
|
||||
determined.
|
||||
:param show_percent: enables or disables the percentage display. The
|
||||
default is `True` if the iterable has a length or
|
||||
`False` if not.
|
||||
:param show_pos: enables or disables the absolute position display. The
|
||||
default is `False`.
|
||||
:param item_show_func: a function called with the current item which
|
||||
can return a string to show the current item
|
||||
next to the progress bar. Note that the current
|
||||
item can be `None`!
|
||||
:param fill_char: the character to use to show the filled part of the
|
||||
progress bar.
|
||||
:param empty_char: the character to use to show the non-filled part of
|
||||
the progress bar.
|
||||
:param bar_template: the format string to use as template for the bar.
|
||||
The parameters in it are ``label`` for the label,
|
||||
``bar`` for the progress bar and ``info`` for the
|
||||
info section.
|
||||
:param info_sep: the separator between multiple info items (eta etc.)
|
||||
:param width: the width of the progress bar in characters, 0 means full
|
||||
terminal width
|
||||
:param file: the file to write to. If this is not a terminal then
|
||||
only the label is printed.
|
||||
:param color: controls if the terminal supports ANSI colors or not. The
|
||||
default is autodetection. This is only needed if ANSI
|
||||
codes are included anywhere in the progress bar output
|
||||
which is not the case by default.
|
||||
"""
|
||||
from ._termui_impl import ProgressBar
|
||||
|
||||
color = resolve_color_default(color)
|
||||
return ProgressBar(
|
||||
iterable=iterable,
|
||||
length=length,
|
||||
show_eta=show_eta,
|
||||
show_percent=show_percent,
|
||||
show_pos=show_pos,
|
||||
item_show_func=item_show_func,
|
||||
fill_char=fill_char,
|
||||
empty_char=empty_char,
|
||||
bar_template=bar_template,
|
||||
info_sep=info_sep,
|
||||
file=file,
|
||||
label=label,
|
||||
width=width,
|
||||
color=color,
|
||||
)
|
||||
|
||||
|
||||
def clear():
|
||||
"""Clears the terminal screen. This will have the effect of clearing
|
||||
the whole visible space of the terminal and moving the cursor to the
|
||||
top left. This does not do anything if not connected to a terminal.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if not isatty(sys.stdout):
|
||||
return
|
||||
# If we're on Windows and we don't have colorama available, then we
|
||||
# clear the screen by shelling out. Otherwise we can use an escape
|
||||
# sequence.
|
||||
if WIN:
|
||||
os.system("cls")
|
||||
else:
|
||||
sys.stdout.write("\033[2J\033[1;1H")
|
||||
|
||||
|
||||
def style(
|
||||
text,
|
||||
fg=None,
|
||||
bg=None,
|
||||
bold=None,
|
||||
dim=None,
|
||||
underline=None,
|
||||
blink=None,
|
||||
reverse=None,
|
||||
reset=True,
|
||||
):
|
||||
"""Styles a text with ANSI styles and returns the new string. By
|
||||
default the styling is self contained which means that at the end
|
||||
of the string a reset code is issued. This can be prevented by
|
||||
passing ``reset=False``.
|
||||
|
||||
Examples::
|
||||
|
||||
click.echo(click.style('Hello World!', fg='green'))
|
||||
click.echo(click.style('ATTENTION!', blink=True))
|
||||
click.echo(click.style('Some things', reverse=True, fg='cyan'))
|
||||
|
||||
Supported color names:
|
||||
|
||||
* ``black`` (might be a gray)
|
||||
* ``red``
|
||||
* ``green``
|
||||
* ``yellow`` (might be an orange)
|
||||
* ``blue``
|
||||
* ``magenta``
|
||||
* ``cyan``
|
||||
* ``white`` (might be light gray)
|
||||
* ``bright_black``
|
||||
* ``bright_red``
|
||||
* ``bright_green``
|
||||
* ``bright_yellow``
|
||||
* ``bright_blue``
|
||||
* ``bright_magenta``
|
||||
* ``bright_cyan``
|
||||
* ``bright_white``
|
||||
* ``reset`` (reset the color code only)
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
.. versionadded:: 7.0
|
||||
Added support for bright colors.
|
||||
|
||||
:param text: the string to style with ansi codes.
|
||||
:param fg: if provided this will become the foreground color.
|
||||
:param bg: if provided this will become the background color.
|
||||
:param bold: if provided this will enable or disable bold mode.
|
||||
:param dim: if provided this will enable or disable dim mode. This is
|
||||
badly supported.
|
||||
:param underline: if provided this will enable or disable underline.
|
||||
:param blink: if provided this will enable or disable blinking.
|
||||
:param reverse: if provided this will enable or disable inverse
|
||||
rendering (foreground becomes background and the
|
||||
other way round).
|
||||
:param reset: by default a reset-all code is added at the end of the
|
||||
string which means that styles do not carry over. This
|
||||
can be disabled to compose styles.
|
||||
"""
|
||||
bits = []
|
||||
if fg:
|
||||
try:
|
||||
bits.append("\033[{}m".format(_ansi_colors[fg]))
|
||||
except KeyError:
|
||||
raise TypeError("Unknown color '{}'".format(fg))
|
||||
if bg:
|
||||
try:
|
||||
bits.append("\033[{}m".format(_ansi_colors[bg] + 10))
|
||||
except KeyError:
|
||||
raise TypeError("Unknown color '{}'".format(bg))
|
||||
if bold is not None:
|
||||
bits.append("\033[{}m".format(1 if bold else 22))
|
||||
if dim is not None:
|
||||
bits.append("\033[{}m".format(2 if dim else 22))
|
||||
if underline is not None:
|
||||
bits.append("\033[{}m".format(4 if underline else 24))
|
||||
if blink is not None:
|
||||
bits.append("\033[{}m".format(5 if blink else 25))
|
||||
if reverse is not None:
|
||||
bits.append("\033[{}m".format(7 if reverse else 27))
|
||||
bits.append(text)
|
||||
if reset:
|
||||
bits.append(_ansi_reset_all)
|
||||
return "".join(bits)
|
||||
|
||||
|
||||
def unstyle(text):
|
||||
"""Removes ANSI styling information from a string. Usually it's not
|
||||
necessary to use this function as Click's echo function will
|
||||
automatically remove styling if necessary.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param text: the text to remove style information from.
|
||||
"""
|
||||
return strip_ansi(text)
|
||||
|
||||
|
||||
def secho(message=None, file=None, nl=True, err=False, color=None, **styles):
|
||||
"""This function combines :func:`echo` and :func:`style` into one
|
||||
call. As such the following two calls are the same::
|
||||
|
||||
click.secho('Hello World!', fg='green')
|
||||
click.echo(click.style('Hello World!', fg='green'))
|
||||
|
||||
All keyword arguments are forwarded to the underlying functions
|
||||
depending on which one they go with.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if message is not None:
|
||||
message = style(message, **styles)
|
||||
return echo(message, file=file, nl=nl, err=err, color=color)
|
||||
|
||||
|
||||
def edit(
|
||||
text=None, editor=None, env=None, require_save=True, extension=".txt", filename=None
|
||||
):
|
||||
r"""Edits the given text in the defined editor. If an editor is given
|
||||
(should be the full path to the executable but the regular operating
|
||||
system search path is used for finding the executable) it overrides
|
||||
the detected editor. Optionally, some environment variables can be
|
||||
used. If the editor is closed without changes, `None` is returned. In
|
||||
case a file is edited directly the return value is always `None` and
|
||||
`require_save` and `extension` are ignored.
|
||||
|
||||
If the editor cannot be opened a :exc:`UsageError` is raised.
|
||||
|
||||
Note for Windows: to simplify cross-platform usage, the newlines are
|
||||
automatically converted from POSIX to Windows and vice versa. As such,
|
||||
the message here will have ``\n`` as newline markers.
|
||||
|
||||
:param text: the text to edit.
|
||||
:param editor: optionally the editor to use. Defaults to automatic
|
||||
detection.
|
||||
:param env: environment variables to forward to the editor.
|
||||
:param require_save: if this is true, then not saving in the editor
|
||||
will make the return value become `None`.
|
||||
:param extension: the extension to tell the editor about. This defaults
|
||||
to `.txt` but changing this might change syntax
|
||||
highlighting.
|
||||
:param filename: if provided it will edit this file instead of the
|
||||
provided text contents. It will not use a temporary
|
||||
file as an indirection in that case.
|
||||
"""
|
||||
from ._termui_impl import Editor
|
||||
|
||||
editor = Editor(
|
||||
editor=editor, env=env, require_save=require_save, extension=extension
|
||||
)
|
||||
if filename is None:
|
||||
return editor.edit(text)
|
||||
editor.edit_file(filename)
|
||||
|
||||
|
||||
def launch(url, wait=False, locate=False):
|
||||
"""This function launches the given URL (or filename) in the default
|
||||
viewer application for this file type. If this is an executable, it
|
||||
might launch the executable in a new session. The return value is
|
||||
the exit code of the launched application. Usually, ``0`` indicates
|
||||
success.
|
||||
|
||||
Examples::
|
||||
|
||||
click.launch('https://click.palletsprojects.com/')
|
||||
click.launch('/my/downloaded/file', locate=True)
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param url: URL or filename of the thing to launch.
|
||||
:param wait: waits for the program to stop.
|
||||
:param locate: if this is set to `True` then instead of launching the
|
||||
application associated with the URL it will attempt to
|
||||
launch a file manager with the file located. This
|
||||
might have weird effects if the URL does not point to
|
||||
the filesystem.
|
||||
"""
|
||||
from ._termui_impl import open_url
|
||||
|
||||
return open_url(url, wait=wait, locate=locate)
|
||||
|
||||
|
||||
# If this is provided, getchar() calls into this instead. This is used
|
||||
# for unittesting purposes.
|
||||
_getchar = None
|
||||
|
||||
|
||||
def getchar(echo=False):
|
||||
"""Fetches a single character from the terminal and returns it. This
|
||||
will always return a unicode character and under certain rare
|
||||
circumstances this might return more than one character. The
|
||||
situations in which more than one character is returned are when for
|
||||
whatever reason multiple characters end up in the terminal buffer or
|
||||
standard input was not actually a terminal.
|
||||
|
||||
Note that this will always read from the terminal, even if something
|
||||
is piped into the standard input.
|
||||
|
||||
Note for Windows: in rare cases when typing non-ASCII characters, this
|
||||
function might wait for a second character and then return both at once.
|
||||
This is because certain Unicode characters look like special-key markers.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param echo: if set to `True`, the character read will also show up on
|
||||
the terminal. The default is to not show it.
|
||||
"""
|
||||
f = _getchar
|
||||
if f is None:
|
||||
from ._termui_impl import getchar as f
|
||||
return f(echo)
|
||||
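# Illustrative usage sketch, not part of the vendored click source: getchar()
# reads a single keypress without waiting for Enter; handy for "press any key"
# style menus.
import click

click.echo("Press q to quit, any other key to continue")
if click.getchar() in ("q", "Q"):
    raise SystemExit(0)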
|
||||
|
||||
def raw_terminal():
|
||||
from ._termui_impl import raw_terminal as f
|
||||
|
||||
return f()
|
||||
|
||||
|
||||
def pause(info="Press any key to continue ...", err=False):
|
||||
"""This command stops execution and waits for the user to press any
|
||||
key to continue. This is similar to the Windows batch "pause"
|
||||
command. If the program is not run through a terminal, this command
|
||||
will instead do nothing.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `err` parameter.
|
||||
|
||||
:param info: the info string to print before pausing.
|
||||
:param err: if set to true the message goes to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
"""
|
||||
if not isatty(sys.stdin) or not isatty(sys.stdout):
|
||||
return
|
||||
try:
|
||||
if info:
|
||||
echo(info, nl=False, err=err)
|
||||
try:
|
||||
getchar()
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
pass
|
||||
finally:
|
||||
if info:
|
||||
echo(err=err)
|
382
matteo_env/Lib/site-packages/click/testing.py
Normal file
@@ -0,0 +1,382 @@
import contextlib
|
||||
import os
|
||||
import shlex
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from . import formatting
|
||||
from . import termui
|
||||
from . import utils
|
||||
from ._compat import iteritems
|
||||
from ._compat import PY2
|
||||
from ._compat import string_types
|
||||
|
||||
|
||||
if PY2:
|
||||
from cStringIO import StringIO
|
||||
else:
|
||||
import io
|
||||
from ._compat import _find_binary_reader
|
||||
|
||||
|
||||
class EchoingStdin(object):
|
||||
def __init__(self, input, output):
|
||||
self._input = input
|
||||
self._output = output
|
||||
|
||||
def __getattr__(self, x):
|
||||
return getattr(self._input, x)
|
||||
|
||||
def _echo(self, rv):
|
||||
self._output.write(rv)
|
||||
return rv
|
||||
|
||||
def read(self, n=-1):
|
||||
return self._echo(self._input.read(n))
|
||||
|
||||
def readline(self, n=-1):
|
||||
return self._echo(self._input.readline(n))
|
||||
|
||||
def readlines(self):
|
||||
return [self._echo(x) for x in self._input.readlines()]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._echo(x) for x in self._input)
|
||||
|
||||
def __repr__(self):
|
||||
return repr(self._input)
|
||||
|
||||
|
||||
def make_input_stream(input, charset):
|
||||
# Is already an input stream.
|
||||
if hasattr(input, "read"):
|
||||
if PY2:
|
||||
return input
|
||||
rv = _find_binary_reader(input)
|
||||
if rv is not None:
|
||||
return rv
|
||||
raise TypeError("Could not find binary reader for input stream.")
|
||||
|
||||
if input is None:
|
||||
input = b""
|
||||
elif not isinstance(input, bytes):
|
||||
input = input.encode(charset)
|
||||
if PY2:
|
||||
return StringIO(input)
|
||||
return io.BytesIO(input)
|
||||
|
||||
|
||||
class Result(object):
|
||||
"""Holds the captured result of an invoked CLI script."""
|
||||
|
||||
def __init__(
|
||||
self, runner, stdout_bytes, stderr_bytes, exit_code, exception, exc_info=None
|
||||
):
|
||||
#: The runner that created the result
|
||||
self.runner = runner
|
||||
#: The standard output as bytes.
|
||||
self.stdout_bytes = stdout_bytes
|
||||
#: The standard error as bytes, or None if not available
|
||||
self.stderr_bytes = stderr_bytes
|
||||
#: The exit code as integer.
|
||||
self.exit_code = exit_code
|
||||
#: The exception that happened if one did.
|
||||
self.exception = exception
|
||||
#: The traceback
|
||||
self.exc_info = exc_info
|
||||
|
||||
@property
|
||||
def output(self):
|
||||
"""The (standard) output as unicode string."""
|
||||
return self.stdout
|
||||
|
||||
@property
|
||||
def stdout(self):
|
||||
"""The standard output as unicode string."""
|
||||
return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
|
||||
"\r\n", "\n"
|
||||
)
|
||||
|
||||
@property
|
||||
def stderr(self):
|
||||
"""The standard error as unicode string."""
|
||||
if self.stderr_bytes is None:
|
||||
raise ValueError("stderr not separately captured")
|
||||
return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
|
||||
"\r\n", "\n"
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<{} {}>".format(
|
||||
type(self).__name__, repr(self.exception) if self.exception else "okay"
|
||||
)
|
||||
|
||||
|
||||
class CliRunner(object):
|
||||
"""The CLI runner provides functionality to invoke a Click command line
|
||||
script for unit-testing purposes in an isolated environment. This only
|
||||
works in single-threaded systems without any concurrency as it changes the
|
||||
global interpreter state.
|
||||
|
||||
:param charset: the character set for the input and output data. This is
|
||||
UTF-8 by default and should not be changed currently as
|
||||
the reporting to Click only works properly in Python 2.
|
||||
:param env: a dictionary with environment variables for overriding.
|
||||
:param echo_stdin: if this is set to `True`, then reading from stdin writes
|
||||
to stdout. This is useful for showing examples in
|
||||
some circumstances. Note that regular prompts
|
||||
will automatically echo the input.
|
||||
:param mix_stderr: if this is set to `False`, then stdout and stderr are
|
||||
preserved as independent streams. This is useful for
|
||||
Unix-philosophy apps that have predictable stdout and
|
||||
noisy stderr, such that each may be measured
|
||||
independently
|
||||
"""
|
||||
|
||||
def __init__(self, charset=None, env=None, echo_stdin=False, mix_stderr=True):
|
||||
if charset is None:
|
||||
charset = "utf-8"
|
||||
self.charset = charset
|
||||
self.env = env or {}
|
||||
self.echo_stdin = echo_stdin
|
||||
self.mix_stderr = mix_stderr
|
||||
|
||||
def get_default_prog_name(self, cli):
|
||||
"""Given a command object it will return the default program name
|
||||
for it. The default is the `name` attribute or ``"root"`` if not
|
||||
set.
|
||||
"""
|
||||
return cli.name or "root"
|
||||
|
||||
def make_env(self, overrides=None):
|
||||
"""Returns the environment overrides for invoking a script."""
|
||||
rv = dict(self.env)
|
||||
if overrides:
|
||||
rv.update(overrides)
|
||||
return rv
|
||||
|
||||
@contextlib.contextmanager
|
||||
def isolation(self, input=None, env=None, color=False):
|
||||
"""A context manager that sets up the isolation for invoking of a
|
||||
command line tool. This sets up stdin with the given input data
|
||||
and `os.environ` with the overrides from the given dictionary.
|
||||
This also rebinds some internals in Click to be mocked (like the
|
||||
prompt functionality).
|
||||
|
||||
This is automatically done in the :meth:`invoke` method.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
The ``color`` parameter was added.
|
||||
|
||||
:param input: the input stream to put into sys.stdin.
|
||||
:param env: the environment overrides as dictionary.
|
||||
:param color: whether the output should contain color codes. The
|
||||
application can still override this explicitly.
|
||||
"""
|
||||
input = make_input_stream(input, self.charset)
|
||||
|
||||
old_stdin = sys.stdin
|
||||
old_stdout = sys.stdout
|
||||
old_stderr = sys.stderr
|
||||
old_forced_width = formatting.FORCED_WIDTH
|
||||
formatting.FORCED_WIDTH = 80
|
||||
|
||||
env = self.make_env(env)
|
||||
|
||||
if PY2:
|
||||
bytes_output = StringIO()
|
||||
if self.echo_stdin:
|
||||
input = EchoingStdin(input, bytes_output)
|
||||
sys.stdout = bytes_output
|
||||
if not self.mix_stderr:
|
||||
bytes_error = StringIO()
|
||||
sys.stderr = bytes_error
|
||||
else:
|
||||
bytes_output = io.BytesIO()
|
||||
if self.echo_stdin:
|
||||
input = EchoingStdin(input, bytes_output)
|
||||
input = io.TextIOWrapper(input, encoding=self.charset)
|
||||
sys.stdout = io.TextIOWrapper(bytes_output, encoding=self.charset)
|
||||
if not self.mix_stderr:
|
||||
bytes_error = io.BytesIO()
|
||||
sys.stderr = io.TextIOWrapper(bytes_error, encoding=self.charset)
|
||||
|
||||
if self.mix_stderr:
|
||||
sys.stderr = sys.stdout
|
||||
|
||||
sys.stdin = input
|
||||
|
||||
def visible_input(prompt=None):
|
||||
sys.stdout.write(prompt or "")
|
||||
val = input.readline().rstrip("\r\n")
|
||||
sys.stdout.write("{}\n".format(val))
|
||||
sys.stdout.flush()
|
||||
return val
|
||||
|
||||
def hidden_input(prompt=None):
|
||||
sys.stdout.write("{}\n".format(prompt or ""))
|
||||
sys.stdout.flush()
|
||||
return input.readline().rstrip("\r\n")
|
||||
|
||||
def _getchar(echo):
|
||||
char = sys.stdin.read(1)
|
||||
if echo:
|
||||
sys.stdout.write(char)
|
||||
sys.stdout.flush()
|
||||
return char
|
||||
|
||||
default_color = color
|
||||
|
||||
def should_strip_ansi(stream=None, color=None):
|
||||
if color is None:
|
||||
return not default_color
|
||||
return not color
|
||||
|
||||
old_visible_prompt_func = termui.visible_prompt_func
|
||||
old_hidden_prompt_func = termui.hidden_prompt_func
|
||||
old__getchar_func = termui._getchar
|
||||
old_should_strip_ansi = utils.should_strip_ansi
|
||||
termui.visible_prompt_func = visible_input
|
||||
termui.hidden_prompt_func = hidden_input
|
||||
termui._getchar = _getchar
|
||||
utils.should_strip_ansi = should_strip_ansi
|
||||
|
||||
old_env = {}
|
||||
try:
|
||||
for key, value in iteritems(env):
|
||||
old_env[key] = os.environ.get(key)
|
||||
if value is None:
|
||||
try:
|
||||
del os.environ[key]
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
os.environ[key] = value
|
||||
yield (bytes_output, not self.mix_stderr and bytes_error)
|
||||
finally:
|
||||
for key, value in iteritems(old_env):
|
||||
if value is None:
|
||||
try:
|
||||
del os.environ[key]
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
os.environ[key] = value
|
||||
sys.stdout = old_stdout
|
||||
sys.stderr = old_stderr
|
||||
sys.stdin = old_stdin
|
||||
termui.visible_prompt_func = old_visible_prompt_func
|
||||
termui.hidden_prompt_func = old_hidden_prompt_func
|
||||
termui._getchar = old__getchar_func
|
||||
utils.should_strip_ansi = old_should_strip_ansi
|
||||
formatting.FORCED_WIDTH = old_forced_width
|
||||
|
||||
def invoke(
|
||||
self,
|
||||
cli,
|
||||
args=None,
|
||||
input=None,
|
||||
env=None,
|
||||
catch_exceptions=True,
|
||||
color=False,
|
||||
**extra
|
||||
):
|
||||
"""Invokes a command in an isolated environment. The arguments are
|
||||
forwarded directly to the command line script, the `extra` keyword
|
||||
arguments are passed to the :meth:`~clickpkg.Command.main` function of
|
||||
the command.
|
||||
|
||||
This returns a :class:`Result` object.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
The ``catch_exceptions`` parameter was added.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
The result object now has an `exc_info` attribute with the
|
||||
traceback if available.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
The ``color`` parameter was added.
|
||||
|
||||
:param cli: the command to invoke
|
||||
:param args: the arguments to invoke. It may be given as an iterable
|
||||
or a string. When given as string it will be interpreted
|
||||
as a Unix shell command. More details at
|
||||
:func:`shlex.split`.
|
||||
:param input: the input data for `sys.stdin`.
|
||||
:param env: the environment overrides.
|
||||
:param catch_exceptions: Whether to catch any other exceptions than
|
||||
``SystemExit``.
|
||||
:param extra: the keyword arguments to pass to :meth:`main`.
|
||||
:param color: whether the output should contain color codes. The
|
||||
application can still override this explicitly.
|
||||
"""
|
||||
exc_info = None
|
||||
with self.isolation(input=input, env=env, color=color) as outstreams:
|
||||
exception = None
|
||||
exit_code = 0
|
||||
|
||||
if isinstance(args, string_types):
|
||||
args = shlex.split(args)
|
||||
|
||||
try:
|
||||
prog_name = extra.pop("prog_name")
|
||||
except KeyError:
|
||||
prog_name = self.get_default_prog_name(cli)
|
||||
|
||||
try:
|
||||
cli.main(args=args or (), prog_name=prog_name, **extra)
|
||||
except SystemExit as e:
|
||||
exc_info = sys.exc_info()
|
||||
exit_code = e.code
|
||||
if exit_code is None:
|
||||
exit_code = 0
|
||||
|
||||
if exit_code != 0:
|
||||
exception = e
|
||||
|
||||
if not isinstance(exit_code, int):
|
||||
sys.stdout.write(str(exit_code))
|
||||
sys.stdout.write("\n")
|
||||
exit_code = 1
|
||||
|
||||
except Exception as e:
|
||||
if not catch_exceptions:
|
||||
raise
|
||||
exception = e
|
||||
exit_code = 1
|
||||
exc_info = sys.exc_info()
|
||||
finally:
|
||||
sys.stdout.flush()
|
||||
stdout = outstreams[0].getvalue()
|
||||
if self.mix_stderr:
|
||||
stderr = None
|
||||
else:
|
||||
stderr = outstreams[1].getvalue()
|
||||
|
||||
return Result(
|
||||
runner=self,
|
||||
stdout_bytes=stdout,
|
||||
stderr_bytes=stderr,
|
||||
exit_code=exit_code,
|
||||
exception=exception,
|
||||
exc_info=exc_info,
|
||||
)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def isolated_filesystem(self):
|
||||
"""A context manager that creates a temporary folder and changes
|
||||
the current working directory to it for isolated filesystem tests.
|
||||
"""
|
||||
cwd = os.getcwd()
|
||||
t = tempfile.mkdtemp()
|
||||
os.chdir(t)
|
||||
try:
|
||||
yield t
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
try:
|
||||
shutil.rmtree(t)
|
||||
except (OSError, IOError): # noqa: B014
|
||||
pass
|
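# Illustrative usage sketch, not part of the vendored click source: a typical
# unit test for a click command using CliRunner and the isolated_filesystem()
# context manager defined above.
import click
from click.testing import CliRunner

@click.command()
@click.argument("name")
def hello(name):
    click.echo("hello {}".format(name))

def test_hello():
    runner = CliRunner()
    with runner.isolated_filesystem():           # temp cwd, removed afterwards
        result = runner.invoke(hello, ["blaseball"])
    assert result.exit_code == 0
    assert result.output == "hello blaseball\n"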
762
matteo_env/Lib/site-packages/click/types.py
Normal file
@@ -0,0 +1,762 @@
import os
|
||||
import stat
|
||||
from datetime import datetime
|
||||
|
||||
from ._compat import _get_argv_encoding
|
||||
from ._compat import filename_to_ui
|
||||
from ._compat import get_filesystem_encoding
|
||||
from ._compat import get_streerror
|
||||
from ._compat import open_stream
|
||||
from ._compat import PY2
|
||||
from ._compat import text_type
|
||||
from .exceptions import BadParameter
|
||||
from .utils import LazyFile
|
||||
from .utils import safecall
|
||||
|
||||
|
||||
class ParamType(object):
|
||||
"""Helper for converting values through types. The following is
|
||||
necessary for a valid type:
|
||||
|
||||
* it needs a name
|
||||
* it needs to pass through None unchanged
|
||||
* it needs to convert from a string
|
||||
* it needs to convert its result type through unchanged
|
||||
(eg: needs to be idempotent)
|
||||
* it needs to be able to deal with param and context being `None`.
|
||||
This can be the case when the object is used with prompt
|
||||
inputs.
|
||||
"""
|
||||
|
||||
is_composite = False
|
||||
|
||||
#: the descriptive name of this type
|
||||
name = None
|
||||
|
||||
#: if a list of this type is expected and the value is pulled from a
|
||||
#: string environment variable, this is what splits it up. `None`
|
||||
#: means any whitespace. For all parameters the general rule is that
|
||||
#: whitespace splits them up. The exception are paths and files which
|
||||
#: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on
|
||||
#: Windows).
|
||||
envvar_list_splitter = None
|
||||
|
||||
def __call__(self, value, param=None, ctx=None):
|
||||
if value is not None:
|
||||
return self.convert(value, param, ctx)
|
||||
|
||||
def get_metavar(self, param):
|
||||
"""Returns the metavar default for this param if it provides one."""
|
||||
|
||||
def get_missing_message(self, param):
|
||||
"""Optionally might return extra information about a missing
|
||||
parameter.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
"""Converts the value. This is not invoked for values that are
|
||||
`None` (the missing value).
|
||||
"""
|
||||
return value
|
||||
|
||||
def split_envvar_value(self, rv):
|
||||
"""Given a value from an environment variable this splits it up
|
||||
into small chunks depending on the defined envvar list splitter.
|
||||
|
||||
If the splitter is set to `None`, which means that whitespace splits,
|
||||
then leading and trailing whitespace is ignored. Otherwise, leading
|
||||
and trailing splitters usually lead to empty items being included.
|
||||
"""
|
||||
return (rv or "").split(self.envvar_list_splitter)
|
||||
|
||||
def fail(self, message, param=None, ctx=None):
|
||||
"""Helper method to fail with an invalid value message."""
|
||||
raise BadParameter(message, ctx=ctx, param=param)
|
||||
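# Illustrative sketch, not part of the vendored click source: a custom type
# only needs a name plus an idempotent convert(); None is passed through
# untouched because __call__ above skips convert() for missing values.
# PositiveInt is a hypothetical example type.
import click

class PositiveInt(click.ParamType):
    name = "positive-int"

    def convert(self, value, param, ctx):
        if isinstance(value, int) and value > 0:      # already converted
            return value
        try:
            number = int(value)
        except (TypeError, ValueError):
            self.fail("{!r} is not a valid integer".format(value), param, ctx)
        if number <= 0:
            self.fail("{!r} is not a positive integer".format(value), param, ctx)
        return number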
|
||||
|
||||
class CompositeParamType(ParamType):
|
||||
is_composite = True
|
||||
|
||||
@property
|
||||
def arity(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class FuncParamType(ParamType):
|
||||
def __init__(self, func):
|
||||
self.name = func.__name__
|
||||
self.func = func
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
try:
|
||||
return self.func(value)
|
||||
except ValueError:
|
||||
try:
|
||||
value = text_type(value)
|
||||
except UnicodeError:
|
||||
value = str(value).decode("utf-8", "replace")
|
||||
self.fail(value, param, ctx)
|
||||
|
||||
|
||||
class UnprocessedParamType(ParamType):
|
||||
name = "text"
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
return value
|
||||
|
||||
def __repr__(self):
|
||||
return "UNPROCESSED"
|
||||
|
||||
|
||||
class StringParamType(ParamType):
|
||||
name = "text"
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
if isinstance(value, bytes):
|
||||
enc = _get_argv_encoding()
|
||||
try:
|
||||
value = value.decode(enc)
|
||||
except UnicodeError:
|
||||
fs_enc = get_filesystem_encoding()
|
||||
if fs_enc != enc:
|
||||
try:
|
||||
value = value.decode(fs_enc)
|
||||
except UnicodeError:
|
||||
value = value.decode("utf-8", "replace")
|
||||
else:
|
||||
value = value.decode("utf-8", "replace")
|
||||
return value
|
||||
return value
|
||||
|
||||
def __repr__(self):
|
||||
return "STRING"
|
||||
|
||||
|
||||
class Choice(ParamType):
|
||||
"""The choice type allows a value to be checked against a fixed set
|
||||
of supported values. All of these values have to be strings.
|
||||
|
||||
You should only pass a list or tuple of choices. Other iterables
|
||||
(like generators) may lead to surprising results.
|
||||
|
||||
The resulting value will always be one of the originally passed choices
|
||||
regardless of ``case_sensitive`` or any ``ctx.token_normalize_func``
|
||||
being specified.
|
||||
|
||||
See :ref:`choice-opts` for an example.
|
||||
|
||||
:param case_sensitive: Set to false to make choices case
|
||||
insensitive. Defaults to true.
|
||||
"""
|
||||
|
||||
name = "choice"
|
||||
|
||||
def __init__(self, choices, case_sensitive=True):
|
||||
self.choices = choices
|
||||
self.case_sensitive = case_sensitive
|
||||
|
||||
def get_metavar(self, param):
|
||||
return "[{}]".format("|".join(self.choices))
|
||||
|
||||
def get_missing_message(self, param):
|
||||
return "Choose from:\n\t{}.".format(",\n\t".join(self.choices))
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
# Match through normalization and case sensitivity
|
||||
# first do token_normalize_func, then lowercase
|
||||
# preserve original `value` to produce an accurate message in
|
||||
# `self.fail`
|
||||
normed_value = value
|
||||
normed_choices = {choice: choice for choice in self.choices}
|
||||
|
||||
if ctx is not None and ctx.token_normalize_func is not None:
|
||||
normed_value = ctx.token_normalize_func(value)
|
||||
normed_choices = {
|
||||
ctx.token_normalize_func(normed_choice): original
|
||||
for normed_choice, original in normed_choices.items()
|
||||
}
|
||||
|
||||
if not self.case_sensitive:
|
||||
if PY2:
|
||||
lower = str.lower
|
||||
else:
|
||||
lower = str.casefold
|
||||
|
||||
normed_value = lower(normed_value)
|
||||
normed_choices = {
|
||||
lower(normed_choice): original
|
||||
for normed_choice, original in normed_choices.items()
|
||||
}
|
||||
|
||||
if normed_value in normed_choices:
|
||||
return normed_choices[normed_value]
|
||||
|
||||
self.fail(
|
||||
"invalid choice: {}. (choose from {})".format(
|
||||
value, ", ".join(self.choices)
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "Choice('{}')".format(list(self.choices))
|
||||
|
||||
|
||||
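A usage sketch (illustrative only, not part of this diff): with case_sensitive=False the user may type any casing, but the command still receives one of the originally supplied choices.

import click

@click.command()
@click.option("--hash-type", type=click.Choice(["MD5", "SHA1"], case_sensitive=False))
def digest(hash_type):
    # passing --hash-type sha1 on the command line prints "SHA1"
    click.echo(hash_type)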
class DateTime(ParamType):
|
||||
"""The DateTime type converts date strings into `datetime` objects.
|
||||
|
||||
The format strings which are checked are configurable, but default to some
|
||||
common (non-timezone aware) ISO 8601 formats.
|
||||
|
||||
When specifying *DateTime* formats, you should only pass a list or a tuple.
|
||||
Other iterables, like generators, may lead to surprising results.
|
||||
|
||||
The format strings are processed using ``datetime.strptime``, and this
|
||||
consequently defines the format strings which are allowed.
|
||||
|
||||
Parsing is tried using each format, in order, and the first format which
|
||||
parses successfully is used.
|
||||
|
||||
:param formats: A list or tuple of date format strings, in the order in
|
||||
which they should be tried. Defaults to
|
||||
``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``,
|
||||
``'%Y-%m-%d %H:%M:%S'``.
|
||||
"""
|
||||
|
||||
name = "datetime"
|
||||
|
||||
def __init__(self, formats=None):
|
||||
self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"]
|
||||
|
||||
def get_metavar(self, param):
|
||||
return "[{}]".format("|".join(self.formats))
|
||||
|
||||
def _try_to_convert_date(self, value, format):
|
||||
try:
|
||||
return datetime.strptime(value, format)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
# Exact match
|
||||
for format in self.formats:
|
||||
dtime = self._try_to_convert_date(value, format)
|
||||
if dtime:
|
||||
return dtime
|
||||
|
||||
self.fail(
|
||||
"invalid datetime format: {}. (choose from {})".format(
|
||||
value, ", ".join(self.formats)
|
||||
)
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "DateTime"
|
||||
|
||||
|
||||
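A usage sketch (illustrative only, not part of this diff): each format is tried in order and the first successful strptime result is returned as a datetime.datetime.

import click

@click.command()
@click.option("--since", type=click.DateTime(formats=["%Y-%m-%d"]))
def changelog(since):
    # --since 2021-01-31 arrives here as datetime.datetime(2021, 1, 31, 0, 0)
    click.echo(repr(since))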
class IntParamType(ParamType):
|
||||
name = "integer"
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
try:
|
||||
return int(value)
|
||||
except ValueError:
|
||||
self.fail("{} is not a valid integer".format(value), param, ctx)
|
||||
|
||||
def __repr__(self):
|
||||
return "INT"
|
||||
|
||||
|
||||
class IntRange(IntParamType):
|
||||
"""A parameter that works similar to :data:`click.INT` but restricts
|
||||
the value to fit into a range. The default behavior is to fail if the
|
||||
value falls outside the range, but it can also be silently clamped
|
||||
between the two edges.
|
||||
|
||||
See :ref:`ranges` for an example.
|
||||
"""
|
||||
|
||||
name = "integer range"
|
||||
|
||||
def __init__(self, min=None, max=None, clamp=False):
|
||||
self.min = min
|
||||
self.max = max
|
||||
self.clamp = clamp
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
rv = IntParamType.convert(self, value, param, ctx)
|
||||
if self.clamp:
|
||||
if self.min is not None and rv < self.min:
|
||||
return self.min
|
||||
if self.max is not None and rv > self.max:
|
||||
return self.max
|
||||
if (
|
||||
self.min is not None
|
||||
and rv < self.min
|
||||
or self.max is not None
|
||||
and rv > self.max
|
||||
):
|
||||
if self.min is None:
|
||||
self.fail(
|
||||
"{} is bigger than the maximum valid value {}.".format(
|
||||
rv, self.max
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
elif self.max is None:
|
||||
self.fail(
|
||||
"{} is smaller than the minimum valid value {}.".format(
|
||||
rv, self.min
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
else:
|
||||
self.fail(
|
||||
"{} is not in the valid range of {} to {}.".format(
|
||||
rv, self.min, self.max
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
return rv
|
||||
|
||||
def __repr__(self):
|
||||
return "IntRange({}, {})".format(self.min, self.max)
|
||||
|
||||
|
||||
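A usage sketch (illustrative only, not part of this diff) contrasting the two behaviors: clamp=True silently pins out-of-range values to the nearest edge, while the default reports a usage error.

import click

@click.command()
@click.option("--volume", type=click.IntRange(0, 11, clamp=True), default=5)  # --volume 15 becomes 11
@click.option("--track", type=click.IntRange(1, 99), default=1)               # --track 0 fails
def play(volume, track):
    click.echo("track {} at volume {}".format(track, volume))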
class FloatParamType(ParamType):
|
||||
name = "float"
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
try:
|
||||
return float(value)
|
||||
except ValueError:
|
||||
self.fail(
|
||||
"{} is not a valid floating point value".format(value), param, ctx
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "FLOAT"
|
||||
|
||||
|
||||
class FloatRange(FloatParamType):
|
||||
"""A parameter that works similar to :data:`click.FLOAT` but restricts
|
||||
the value to fit into a range. The default behavior is to fail if the
|
||||
value falls outside the range, but it can also be silently clamped
|
||||
between the two edges.
|
||||
|
||||
See :ref:`ranges` for an example.
|
||||
"""
|
||||
|
||||
name = "float range"
|
||||
|
||||
def __init__(self, min=None, max=None, clamp=False):
|
||||
self.min = min
|
||||
self.max = max
|
||||
self.clamp = clamp
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
rv = FloatParamType.convert(self, value, param, ctx)
|
||||
if self.clamp:
|
||||
if self.min is not None and rv < self.min:
|
||||
return self.min
|
||||
if self.max is not None and rv > self.max:
|
||||
return self.max
|
||||
if (
|
||||
self.min is not None
|
||||
and rv < self.min
|
||||
or self.max is not None
|
||||
and rv > self.max
|
||||
):
|
||||
if self.min is None:
|
||||
self.fail(
|
||||
"{} is bigger than the maximum valid value {}.".format(
|
||||
rv, self.max
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
elif self.max is None:
|
||||
self.fail(
|
||||
"{} is smaller than the minimum valid value {}.".format(
|
||||
rv, self.min
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
else:
|
||||
self.fail(
|
||||
"{} is not in the valid range of {} to {}.".format(
|
||||
rv, self.min, self.max
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
return rv
|
||||
|
||||
def __repr__(self):
|
||||
return "FloatRange({}, {})".format(self.min, self.max)
|
||||
|
||||
|
||||
class BoolParamType(ParamType):
|
||||
name = "boolean"
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
if isinstance(value, bool):
|
||||
return bool(value)
|
||||
value = value.lower()
|
||||
if value in ("true", "t", "1", "yes", "y"):
|
||||
return True
|
||||
elif value in ("false", "f", "0", "no", "n"):
|
||||
return False
|
||||
self.fail("{} is not a valid boolean".format(value), param, ctx)
|
||||
|
||||
def __repr__(self):
|
||||
return "BOOL"
|
||||
|
||||
|
||||
class UUIDParameterType(ParamType):
|
||||
name = "uuid"
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
import uuid
|
||||
|
||||
try:
|
||||
if PY2 and isinstance(value, text_type):
|
||||
value = value.encode("ascii")
|
||||
return uuid.UUID(value)
|
||||
except ValueError:
|
||||
self.fail("{} is not a valid UUID value".format(value), param, ctx)
|
||||
|
||||
def __repr__(self):
|
||||
return "UUID"
|
||||
|
||||
|
||||
class File(ParamType):
|
||||
"""Declares a parameter to be a file for reading or writing. The file
|
||||
is automatically closed once the context tears down (after the command
|
||||
finished working).
|
||||
|
||||
Files can be opened for reading or writing. The special value ``-``
|
||||
indicates stdin or stdout depending on the mode.
|
||||
|
||||
By default, the file is opened for reading text data, but it can also be
|
||||
opened in binary mode or for writing. The encoding parameter can be used
|
||||
to force a specific encoding.
|
||||
|
||||
The `lazy` flag controls if the file should be opened immediately or upon
|
||||
first IO. The default is to be non-lazy for standard input and output
|
||||
streams as well as files opened for reading, `lazy` otherwise. When opening a
|
||||
file lazily for reading, it is still opened temporarily for validation, but
|
||||
will not be held open until first IO. lazy is mainly useful when opening
|
||||
for writing to avoid creating the file until it is needed.
|
||||
|
||||
Starting with Click 2.0, files can also be opened atomically in which
|
||||
case all writes go into a separate file in the same folder and upon
|
||||
completion the file will be moved over to the original location. This
|
||||
is useful if a file regularly read by other users is modified.
|
||||
|
||||
See :ref:`file-args` for more information.
|
||||
"""
|
||||
|
||||
name = "filename"
|
||||
envvar_list_splitter = os.path.pathsep
|
||||
|
||||
def __init__(
|
||||
self, mode="r", encoding=None, errors="strict", lazy=None, atomic=False
|
||||
):
|
||||
self.mode = mode
|
||||
self.encoding = encoding
|
||||
self.errors = errors
|
||||
self.lazy = lazy
|
||||
self.atomic = atomic
|
||||
|
||||
def resolve_lazy_flag(self, value):
|
||||
if self.lazy is not None:
|
||||
return self.lazy
|
||||
if value == "-":
|
||||
return False
|
||||
elif "w" in self.mode:
|
||||
return True
|
||||
return False
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
try:
|
||||
if hasattr(value, "read") or hasattr(value, "write"):
|
||||
return value
|
||||
|
||||
lazy = self.resolve_lazy_flag(value)
|
||||
|
||||
if lazy:
|
||||
f = LazyFile(
|
||||
value, self.mode, self.encoding, self.errors, atomic=self.atomic
|
||||
)
|
||||
if ctx is not None:
|
||||
ctx.call_on_close(f.close_intelligently)
|
||||
return f
|
||||
|
||||
f, should_close = open_stream(
|
||||
value, self.mode, self.encoding, self.errors, atomic=self.atomic
|
||||
)
|
||||
# If a context is provided, we automatically close the file
|
||||
# at the end of the context execution (or flush out). If a
|
||||
# context does not exist, it's the caller's responsibility to
|
||||
# properly close the file. This for instance happens when the
|
||||
# type is used with prompts.
|
||||
if ctx is not None:
|
||||
if should_close:
|
||||
ctx.call_on_close(safecall(f.close))
|
||||
else:
|
||||
ctx.call_on_close(safecall(f.flush))
|
||||
return f
|
||||
except (IOError, OSError) as e: # noqa: B014
|
||||
self.fail(
|
||||
"Could not open file: {}: {}".format(
|
||||
filename_to_ui(value), get_streerror(e)
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
|
||||
|
||||
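A usage sketch (illustrative only, not part of this diff): '-' maps to stdin/stdout, while lazy plus atomic defers creating the output file and writes through a temporary file that is moved into place on close.

import click

@click.command()
@click.argument("src", type=click.File("r"))
@click.argument("dst", type=click.File("w", lazy=True, atomic=True))
def copy(src, dst):
    # both files are closed automatically when the context tears down
    dst.write(src.read())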
class Path(ParamType):
|
||||
"""The path type is similar to the :class:`File` type but it performs
|
||||
different checks. First of all, instead of returning an open file
|
||||
handle it returns just the filename. Secondly, it can perform various
|
||||
basic checks about what the file or directory should be.
|
||||
|
||||
.. versionchanged:: 6.0
|
||||
`allow_dash` was added.
|
||||
|
||||
:param exists: if set to true, the file or directory needs to exist for
|
||||
this value to be valid. If this is not required and a
|
||||
file does indeed not exist, then all further checks are
|
||||
silently skipped.
|
||||
:param file_okay: controls if a file is a possible value.
|
||||
:param dir_okay: controls if a directory is a possible value.
|
||||
:param writable: if true, a writable check is performed.
|
||||
:param readable: if true, a readable check is performed.
|
||||
:param resolve_path: if this is true, then the path is fully resolved
|
||||
before the value is passed onwards. This means
|
||||
that it's absolute and symlinks are resolved. It
|
||||
will not expand a tilde-prefix, as this is
|
||||
supposed to be done by the shell only.
|
||||
:param allow_dash: If this is set to `True`, a single dash to indicate
|
||||
standard streams is permitted.
|
||||
:param path_type: optionally a string type that should be used to
|
||||
represent the path. The default is `None` which
|
||||
means the return value will be either bytes or
|
||||
unicode depending on what makes most sense given the
|
||||
input data Click deals with.
|
||||
"""
|
||||
|
||||
envvar_list_splitter = os.path.pathsep
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
exists=False,
|
||||
file_okay=True,
|
||||
dir_okay=True,
|
||||
writable=False,
|
||||
readable=True,
|
||||
resolve_path=False,
|
||||
allow_dash=False,
|
||||
path_type=None,
|
||||
):
|
||||
self.exists = exists
|
||||
self.file_okay = file_okay
|
||||
self.dir_okay = dir_okay
|
||||
self.writable = writable
|
||||
self.readable = readable
|
||||
self.resolve_path = resolve_path
|
||||
self.allow_dash = allow_dash
|
||||
self.type = path_type
|
||||
|
||||
if self.file_okay and not self.dir_okay:
|
||||
self.name = "file"
|
||||
self.path_type = "File"
|
||||
elif self.dir_okay and not self.file_okay:
|
||||
self.name = "directory"
|
||||
self.path_type = "Directory"
|
||||
else:
|
||||
self.name = "path"
|
||||
self.path_type = "Path"
|
||||
|
||||
def coerce_path_result(self, rv):
|
||||
if self.type is not None and not isinstance(rv, self.type):
|
||||
if self.type is text_type:
|
||||
rv = rv.decode(get_filesystem_encoding())
|
||||
else:
|
||||
rv = rv.encode(get_filesystem_encoding())
|
||||
return rv
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
rv = value
|
||||
|
||||
is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-")
|
||||
|
||||
if not is_dash:
|
||||
if self.resolve_path:
|
||||
rv = os.path.realpath(rv)
|
||||
|
||||
try:
|
||||
st = os.stat(rv)
|
||||
except OSError:
|
||||
if not self.exists:
|
||||
return self.coerce_path_result(rv)
|
||||
self.fail(
|
||||
"{} '{}' does not exist.".format(
|
||||
self.path_type, filename_to_ui(value)
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
|
||||
if not self.file_okay and stat.S_ISREG(st.st_mode):
|
||||
self.fail(
|
||||
"{} '{}' is a file.".format(self.path_type, filename_to_ui(value)),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
if not self.dir_okay and stat.S_ISDIR(st.st_mode):
|
||||
self.fail(
|
||||
"{} '{}' is a directory.".format(
|
||||
self.path_type, filename_to_ui(value)
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
if self.writable and not os.access(value, os.W_OK):
|
||||
self.fail(
|
||||
"{} '{}' is not writable.".format(
|
||||
self.path_type, filename_to_ui(value)
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
if self.readable and not os.access(value, os.R_OK):
|
||||
self.fail(
|
||||
"{} '{}' is not readable.".format(
|
||||
self.path_type, filename_to_ui(value)
|
||||
),
|
||||
param,
|
||||
ctx,
|
||||
)
|
||||
|
||||
return self.coerce_path_result(rv)
|
||||
|
||||
|
||||
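A usage sketch (illustrative only, not part of this diff): unlike File, the command receives a plain path string once the requested checks have passed.

import click

@click.command()
@click.argument("config", type=click.Path(exists=True, dir_okay=False, readable=True))
def load(config):
    click.echo("loading {}".format(config))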
class Tuple(CompositeParamType):
|
||||
"""The default behavior of Click is to apply a type on a value directly.
|
||||
This works well in most cases, except for when `nargs` is set to a fixed
|
||||
count and different types should be used for different items. In this
|
||||
case the :class:`Tuple` type can be used. This type can only be used
|
||||
if `nargs` is set to a fixed number.
|
||||
|
||||
For more information see :ref:`tuple-type`.
|
||||
|
||||
This can be selected by using a Python tuple literal as a type.
|
||||
|
||||
:param types: a list of types that should be used for the tuple items.
|
||||
"""
|
||||
|
||||
def __init__(self, types):
|
||||
self.types = [convert_type(ty) for ty in types]
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return "<{}>".format(" ".join(ty.name for ty in self.types))
|
||||
|
||||
@property
|
||||
def arity(self):
|
||||
return len(self.types)
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
if len(value) != len(self.types):
|
||||
raise TypeError(
|
||||
"It would appear that nargs is set to conflict with the"
|
||||
" composite type arity."
|
||||
)
|
||||
return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value))
|
||||
|
||||
|
||||
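A usage sketch (illustrative only, not part of this diff): passing a Python tuple literal as the type selects this class, with nargs taken from the tuple's arity.

import click

@click.command()
@click.option("--item", type=(str, int), default=("widget", 1))  # behaves like Tuple([STRING, INT]) with nargs=2
def put(item):
    name, count = item
    click.echo("{} x {}".format(name, count))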
def convert_type(ty, default=None):
|
||||
"""Converts a callable or python type into the most appropriate
|
||||
param type.
|
||||
"""
|
||||
guessed_type = False
|
||||
if ty is None and default is not None:
|
||||
if isinstance(default, tuple):
|
||||
ty = tuple(map(type, default))
|
||||
else:
|
||||
ty = type(default)
|
||||
guessed_type = True
|
||||
|
||||
if isinstance(ty, tuple):
|
||||
return Tuple(ty)
|
||||
if isinstance(ty, ParamType):
|
||||
return ty
|
||||
if ty is text_type or ty is str or ty is None:
|
||||
return STRING
|
||||
if ty is int:
|
||||
return INT
|
||||
# Booleans are only okay if not guessed. This is done because for
|
||||
# flags the default value is actually a bit of a lie in that it
|
||||
# indicates which of the flags is the one we want. See get_default()
|
||||
# for more information.
|
||||
if ty is bool and not guessed_type:
|
||||
return BOOL
|
||||
if ty is float:
|
||||
return FLOAT
|
||||
if guessed_type:
|
||||
return STRING
|
||||
|
||||
# Catch a common mistake
|
||||
if __debug__:
|
||||
try:
|
||||
if issubclass(ty, ParamType):
|
||||
raise AssertionError(
|
||||
"Attempted to use an uninstantiated parameter type ({}).".format(ty)
|
||||
)
|
||||
except TypeError:
|
||||
pass
|
||||
return FuncParamType(ty)
|
||||
|
||||
|
||||
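A few illustrative calls (not part of this diff) showing the guessing rules above, including the boolean special case:

convert_type(None, default=3)          # -> INT, guessed from the default
convert_type(None, default=("a", 1))   # -> Tuple over STRING and INT
convert_type(None, default=True)       # -> STRING, booleans are never guessed (see the comment above)
convert_type(bool)                     # -> BOOL when requested explicitly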
#: A dummy parameter type that just does nothing. From a user's
|
||||
#: perspective this appears to just be the same as `STRING` but internally
|
||||
#: no string conversion takes place. This is necessary to achieve the
|
||||
#: same bytes/unicode behavior on Python 2/3 in situations where you want
|
||||
#: to not convert argument types. This is usually useful when working
|
||||
#: with file paths as they can appear in bytes and unicode.
|
||||
#:
|
||||
#: For path related uses the :class:`Path` type is a better choice but
|
||||
#: there are situations where an unprocessed type is useful which is why
|
||||
#: it is provided.
|
||||
#:
|
||||
#: .. versionadded:: 4.0
|
||||
UNPROCESSED = UnprocessedParamType()
|
||||
|
||||
#: A unicode string parameter type which is the implicit default. This
|
||||
#: can also be selected by using ``str`` as type.
|
||||
STRING = StringParamType()
|
||||
|
||||
#: An integer parameter. This can also be selected by using ``int`` as
|
||||
#: type.
|
||||
INT = IntParamType()
|
||||
|
||||
#: A floating point value parameter. This can also be selected by using
|
||||
#: ``float`` as type.
|
||||
FLOAT = FloatParamType()
|
||||
|
||||
#: A boolean parameter. This is the default for boolean flags. This can
|
||||
#: also be selected by using ``bool`` as a type.
|
||||
BOOL = BoolParamType()
|
||||
|
||||
#: A UUID parameter.
|
||||
UUID = UUIDParameterType()
|
455
matteo_env/Lib/site-packages/click/utils.py
Normal file
|
@ -0,0 +1,455 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
from ._compat import _default_text_stderr
|
||||
from ._compat import _default_text_stdout
|
||||
from ._compat import auto_wrap_for_ansi
|
||||
from ._compat import binary_streams
|
||||
from ._compat import filename_to_ui
|
||||
from ._compat import get_filesystem_encoding
|
||||
from ._compat import get_streerror
|
||||
from ._compat import is_bytes
|
||||
from ._compat import open_stream
|
||||
from ._compat import PY2
|
||||
from ._compat import should_strip_ansi
|
||||
from ._compat import string_types
|
||||
from ._compat import strip_ansi
|
||||
from ._compat import text_streams
|
||||
from ._compat import text_type
|
||||
from ._compat import WIN
|
||||
from .globals import resolve_color_default
|
||||
|
||||
if not PY2:
|
||||
from ._compat import _find_binary_writer
|
||||
elif WIN:
|
||||
from ._winconsole import _get_windows_argv
|
||||
from ._winconsole import _hash_py_argv
|
||||
from ._winconsole import _initial_argv_hash
|
||||
|
||||
echo_native_types = string_types + (bytes, bytearray)
|
||||
|
||||
|
||||
def _posixify(name):
|
||||
return "-".join(name.split()).lower()
|
||||
|
||||
|
||||
def safecall(func):
|
||||
"""Wraps a function so that it swallows exceptions."""
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def make_str(value):
|
||||
"""Converts a value into a valid string."""
|
||||
if isinstance(value, bytes):
|
||||
try:
|
||||
return value.decode(get_filesystem_encoding())
|
||||
except UnicodeError:
|
||||
return value.decode("utf-8", "replace")
|
||||
return text_type(value)
|
||||
|
||||
|
||||
def make_default_short_help(help, max_length=45):
|
||||
"""Return a condensed version of help string."""
|
||||
words = help.split()
|
||||
total_length = 0
|
||||
result = []
|
||||
done = False
|
||||
|
||||
for word in words:
|
||||
if word[-1:] == ".":
|
||||
done = True
|
||||
new_length = 1 + len(word) if result else len(word)
|
||||
if total_length + new_length > max_length:
|
||||
result.append("...")
|
||||
done = True
|
||||
else:
|
||||
if result:
|
||||
result.append(" ")
|
||||
result.append(word)
|
||||
if done:
|
||||
break
|
||||
total_length += new_length
|
||||
|
||||
return "".join(result)
|
||||
|
||||
|
||||
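An illustrative call (not part of this diff): the condensed help stops at the first sentence, or is cut off with "..." once max_length is exceeded.

make_default_short_help("Sync the repo.  Long details that are dropped.")
# -> 'Sync the repo.'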
class LazyFile(object):
|
||||
"""A lazy file works like a regular file but it does not fully open
|
||||
the file but it does perform some basic checks early to see if the
|
||||
filename parameter does make sense. This is useful for safely opening
|
||||
files for writing.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, filename, mode="r", encoding=None, errors="strict", atomic=False
|
||||
):
|
||||
self.name = filename
|
||||
self.mode = mode
|
||||
self.encoding = encoding
|
||||
self.errors = errors
|
||||
self.atomic = atomic
|
||||
|
||||
if filename == "-":
|
||||
self._f, self.should_close = open_stream(filename, mode, encoding, errors)
|
||||
else:
|
||||
if "r" in mode:
|
||||
# Open and close the file in case we're opening it for
|
||||
# reading so that we can catch at least some errors in
|
||||
# some cases early.
|
||||
open(filename, mode).close()
|
||||
self._f = None
|
||||
self.should_close = True
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.open(), name)
|
||||
|
||||
def __repr__(self):
|
||||
if self._f is not None:
|
||||
return repr(self._f)
|
||||
return "<unopened file '{}' {}>".format(self.name, self.mode)
|
||||
|
||||
def open(self):
|
||||
"""Opens the file if it's not yet open. This call might fail with
|
||||
a :exc:`FileError`. Not handling this error will produce an error
|
||||
that Click shows.
|
||||
"""
|
||||
if self._f is not None:
|
||||
return self._f
|
||||
try:
|
||||
rv, self.should_close = open_stream(
|
||||
self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
|
||||
)
|
||||
except (IOError, OSError) as e: # noqa: E402
|
||||
from .exceptions import FileError
|
||||
|
||||
raise FileError(self.name, hint=get_streerror(e))
|
||||
self._f = rv
|
||||
return rv
|
||||
|
||||
def close(self):
|
||||
"""Closes the underlying file, no matter what."""
|
||||
if self._f is not None:
|
||||
self._f.close()
|
||||
|
||||
def close_intelligently(self):
|
||||
"""This function only closes the file if it was opened by the lazy
|
||||
file wrapper. For instance this will never close stdin.
|
||||
"""
|
||||
if self.should_close:
|
||||
self.close()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
self.close_intelligently()
|
||||
|
||||
def __iter__(self):
|
||||
self.open()
|
||||
return iter(self._f)
|
||||
|
||||
|
||||
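An illustrative sketch (not part of this diff) of the write-side use case: the file is not created until the first real operation forces open().

log = LazyFile("run.log", "w")    # nothing is created on disk yet
log.write("started\n")            # first attribute access opens the real file
log.close_intelligently()         # closes it, since this wrapper opened it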
class KeepOpenFile(object):
|
||||
def __init__(self, file):
|
||||
self._file = file
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._file, name)
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
pass
|
||||
|
||||
def __repr__(self):
|
||||
return repr(self._file)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._file)
|
||||
|
||||
|
||||
def echo(message=None, file=None, nl=True, err=False, color=None):
|
||||
"""Prints a message plus a newline to the given file or stdout. On
|
||||
first sight, this looks like the print function, but it has improved
|
||||
support for handling Unicode and binary data that does not fail no
|
||||
matter how badly configured the system is.
|
||||
|
||||
Primarily it means that you can print binary data as well as Unicode
|
||||
data on both 2.x and 3.x to the given file in the most appropriate way
|
||||
possible. This is a very carefree function in that it will try its
|
||||
best to not fail. As of Click 6.0 this includes support for unicode
|
||||
output on the Windows console.
|
||||
|
||||
In addition to that, if `colorama`_ is installed, the echo function will
|
||||
also support clever handling of ANSI codes. Essentially it will then
|
||||
do the following:
|
||||
|
||||
- add transparent handling of ANSI color codes on Windows.
|
||||
- hide ANSI codes automatically if the destination file is not a
|
||||
terminal.
|
||||
|
||||
.. _colorama: https://pypi.org/project/colorama/
|
||||
|
||||
.. versionchanged:: 6.0
|
||||
As of Click 6.0 the echo function will properly support unicode
|
||||
output on the Windows console. Note that click does not modify
|
||||
the interpreter in any way which means that `sys.stdout` or the
|
||||
print statement or function will still not provide unicode support.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
Starting with version 2.0 of Click, the echo function will work
|
||||
with colorama if it's installed.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
The `err` parameter was added.
|
||||
|
||||
.. versionchanged:: 4.0
|
||||
Added the `color` flag.
|
||||
|
||||
:param message: the message to print
|
||||
:param file: the file to write to (defaults to ``stdout``)
|
||||
:param err: if set to true the file defaults to ``stderr`` instead of
|
||||
``stdout``. This is faster and easier than calling
|
||||
:func:`get_text_stderr` yourself.
|
||||
:param nl: if set to `True` (the default) a newline is printed afterwards.
|
||||
:param color: controls if the terminal supports ANSI colors or not. The
|
||||
default is autodetection.
|
||||
"""
|
||||
if file is None:
|
||||
if err:
|
||||
file = _default_text_stderr()
|
||||
else:
|
||||
file = _default_text_stdout()
|
||||
|
||||
# Convert non bytes/text into the native string type.
|
||||
if message is not None and not isinstance(message, echo_native_types):
|
||||
message = text_type(message)
|
||||
|
||||
if nl:
|
||||
message = message or u""
|
||||
if isinstance(message, text_type):
|
||||
message += u"\n"
|
||||
else:
|
||||
message += b"\n"
|
||||
|
||||
# If there is a message, and we're in Python 3, and the value looks
|
||||
# like bytes, we manually need to find the binary stream and write the
|
||||
# message in there. This is done separately so that most stream
|
||||
# types will work as you would expect. Eg: you can write to StringIO
|
||||
# for other cases.
|
||||
if message and not PY2 and is_bytes(message):
|
||||
binary_file = _find_binary_writer(file)
|
||||
if binary_file is not None:
|
||||
file.flush()
|
||||
binary_file.write(message)
|
||||
binary_file.flush()
|
||||
return
|
||||
|
||||
# ANSI-style support. If there is no message or we are dealing with
|
||||
# bytes nothing is happening. If we are connected to a file we want
|
||||
# to strip colors. If we are on windows we either wrap the stream
|
||||
# to strip the color or we use the colorama support to translate the
|
||||
# ansi codes to API calls.
|
||||
if message and not is_bytes(message):
|
||||
color = resolve_color_default(color)
|
||||
if should_strip_ansi(file, color):
|
||||
message = strip_ansi(message)
|
||||
elif WIN:
|
||||
if auto_wrap_for_ansi is not None:
|
||||
file = auto_wrap_for_ansi(file)
|
||||
elif not color:
|
||||
message = strip_ansi(message)
|
||||
|
||||
if message:
|
||||
file.write(message)
|
||||
file.flush()
|
||||
|
||||
|
||||
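A few illustrative calls (not part of this diff): text, bytes and styled output all go through the same function.

import click

click.echo("Hello, world!")                            # text plus newline to stdout
click.echo(b"\xde\xad\xbe\xef")                        # bytes are routed to the binary stream
click.echo(click.style("error", fg="red"), err=True)   # ANSI-aware, sent to stderr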
def get_binary_stream(name):
|
||||
"""Returns a system stream for byte processing. This essentially
|
||||
returns the stream from the sys module with the given name but it
|
||||
solves some compatibility issues between different Python versions.
|
||||
Primarily this function is necessary for getting binary streams on
|
||||
Python 3.
|
||||
|
||||
:param name: the name of the stream to open. Valid names are ``'stdin'``,
|
||||
``'stdout'`` and ``'stderr'``
|
||||
"""
|
||||
opener = binary_streams.get(name)
|
||||
if opener is None:
|
||||
raise TypeError("Unknown standard stream '{}'".format(name))
|
||||
return opener()
|
||||
|
||||
|
||||
def get_text_stream(name, encoding=None, errors="strict"):
|
||||
"""Returns a system stream for text processing. This usually returns
|
||||
a wrapped stream around a binary stream returned from
|
||||
:func:`get_binary_stream` but it also can take shortcuts on Python 3
|
||||
for already correctly configured streams.
|
||||
|
||||
:param name: the name of the stream to open. Valid names are ``'stdin'``,
|
||||
``'stdout'`` and ``'stderr'``
|
||||
:param encoding: overrides the detected default encoding.
|
||||
:param errors: overrides the default error mode.
|
||||
"""
|
||||
opener = text_streams.get(name)
|
||||
if opener is None:
|
||||
raise TypeError("Unknown standard stream '{}'".format(name))
|
||||
return opener(encoding, errors)
|
||||
|
||||
|
||||
def open_file(
|
||||
filename, mode="r", encoding=None, errors="strict", lazy=False, atomic=False
|
||||
):
|
||||
"""This is similar to how the :class:`File` works but for manual
|
||||
usage. Files are opened non-lazily by default. This can open regular
|
||||
files as well as stdin/stdout if ``'-'`` is passed.
|
||||
|
||||
If stdin/stdout is returned the stream is wrapped so that the context
|
||||
manager will not close the stream accidentally. This makes it possible
|
||||
to always use the function like this without having to worry about
|
||||
accidentally closing a standard stream::
|
||||
|
||||
with open_file(filename) as f:
|
||||
...
|
||||
|
||||
.. versionadded:: 3.0
|
||||
|
||||
:param filename: the name of the file to open (or ``'-'`` for stdin/stdout).
|
||||
:param mode: the mode in which to open the file.
|
||||
:param encoding: the encoding to use.
|
||||
:param errors: the error handling for this file.
|
||||
:param lazy: can be flipped to true to open the file lazily.
|
||||
:param atomic: in atomic mode writes go into a temporary file and it's
|
||||
moved on close.
|
||||
"""
|
||||
if lazy:
|
||||
return LazyFile(filename, mode, encoding, errors, atomic=atomic)
|
||||
f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
|
||||
if not should_close:
|
||||
f = KeepOpenFile(f)
|
||||
return f
|
||||
|
||||
|
||||
def get_os_args():
|
||||
"""This returns the argument part of sys.argv in the most appropriate
|
||||
form for processing. What this means is that this return value is in
|
||||
a format that works for Click to process but does not necessarily
|
||||
correspond well to what's actually standard for the interpreter.
|
||||
|
||||
On most environments the return value is ``sys.argv[1:]`` unchanged.
|
||||
However if you are on Windows and running Python 2 the return value
|
||||
will actually be a list of unicode strings instead because the
|
||||
default behavior on that platform otherwise will not be able to
|
||||
carry all possible values that sys.argv can have.
|
||||
|
||||
.. versionadded:: 6.0
|
||||
"""
|
||||
# We can only extract the unicode argv if sys.argv has not been
|
||||
# changed since the startup of the application.
|
||||
if PY2 and WIN and _initial_argv_hash == _hash_py_argv():
|
||||
return _get_windows_argv()
|
||||
return sys.argv[1:]
|
||||
|
||||
|
||||
def format_filename(filename, shorten=False):
|
||||
"""Formats a filename for user display. The main purpose of this
|
||||
function is to ensure that the filename can be displayed at all. This
|
||||
will decode the filename to unicode if necessary in a way that it will
|
||||
not fail. Optionally, it can shorten the filename to not include the
|
||||
full path to the filename.
|
||||
|
||||
:param filename: formats a filename for UI display. This will also convert
|
||||
the filename into unicode without failing.
|
||||
:param shorten: this optionally shortens the filename to strip off the
|
||||
path that leads up to it.
|
||||
"""
|
||||
if shorten:
|
||||
filename = os.path.basename(filename)
|
||||
return filename_to_ui(filename)
|
||||
|
||||
|
||||
def get_app_dir(app_name, roaming=True, force_posix=False):
|
||||
r"""Returns the config folder for the application. The default behavior
|
||||
is to return whatever is most appropriate for the operating system.
|
||||
|
||||
To give you an idea, for an app called ``"Foo Bar"``, something like
|
||||
the following folders could be returned:
|
||||
|
||||
Mac OS X:
|
||||
``~/Library/Application Support/Foo Bar``
|
||||
Mac OS X (POSIX):
|
||||
``~/.foo-bar``
|
||||
Unix:
|
||||
``~/.config/foo-bar``
|
||||
Unix (POSIX):
|
||||
``~/.foo-bar``
|
||||
Win XP (roaming):
|
||||
``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
|
||||
Win XP (not roaming):
|
||||
``C:\Documents and Settings\<user>\Application Data\Foo Bar``
|
||||
Win 7 (roaming):
|
||||
``C:\Users\<user>\AppData\Roaming\Foo Bar``
|
||||
Win 7 (not roaming):
|
||||
``C:\Users\<user>\AppData\Local\Foo Bar``
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param app_name: the application name. This should be properly capitalized
|
||||
and can contain whitespace.
|
||||
:param roaming: controls if the folder should be roaming or not on Windows.
|
||||
Has no effect otherwise.
|
||||
:param force_posix: if this is set to `True` then on any POSIX system the
|
||||
folder will be stored in the home folder with a leading
|
||||
dot instead of the XDG config home or darwin's
|
||||
application support folder.
|
||||
"""
|
||||
if WIN:
|
||||
key = "APPDATA" if roaming else "LOCALAPPDATA"
|
||||
folder = os.environ.get(key)
|
||||
if folder is None:
|
||||
folder = os.path.expanduser("~")
|
||||
return os.path.join(folder, app_name)
|
||||
if force_posix:
|
||||
return os.path.join(os.path.expanduser("~/.{}".format(_posixify(app_name))))
|
||||
if sys.platform == "darwin":
|
||||
return os.path.join(
|
||||
os.path.expanduser("~/Library/Application Support"), app_name
|
||||
)
|
||||
return os.path.join(
|
||||
os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
|
||||
_posixify(app_name),
|
||||
)
|
||||
|
||||
|
||||
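An illustrative sketch (not part of this diff) of the intended use: build a per-user config path and read it if present. The "Foo Bar" app name and config.json filename are placeholders.

import json
import os

import click

cfg = os.path.join(click.get_app_dir("Foo Bar"), "config.json")
settings = {}
if os.path.exists(cfg):
    with open(cfg) as f:
        settings = json.load(f)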
class PacifyFlushWrapper(object):
|
||||
"""This wrapper is used to catch and suppress BrokenPipeErrors resulting
|
||||
from ``.flush()`` being called on broken pipe during the shutdown/final-GC
|
||||
of the Python interpreter. Notably ``.flush()`` is always called on
|
||||
``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
|
||||
other cleanup code, and the case where the underlying file is not a broken
|
||||
pipe, all calls and attributes are proxied.
|
||||
"""
|
||||
|
||||
def __init__(self, wrapped):
|
||||
self.wrapped = wrapped
|
||||
|
||||
def flush(self):
|
||||
try:
|
||||
self.wrapped.flush()
|
||||
except IOError as e:
|
||||
import errno
|
||||
|
||||
if e.errno != errno.EPIPE:
|
||||
raise
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.wrapped, attr)
|
25
matteo_env/Lib/site-packages/engineio/__init__.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
import sys
|
||||
|
||||
from .client import Client
|
||||
from .middleware import WSGIApp, Middleware
|
||||
from .server import Server
|
||||
if sys.version_info >= (3, 5): # pragma: no cover
|
||||
from .asyncio_server import AsyncServer
|
||||
from .asyncio_client import AsyncClient
|
||||
from .async_drivers.asgi import ASGIApp
|
||||
try:
|
||||
from .async_drivers.tornado import get_tornado_handler
|
||||
except ImportError:
|
||||
get_tornado_handler = None
|
||||
else: # pragma: no cover
|
||||
AsyncServer = None
|
||||
AsyncClient = None
|
||||
get_tornado_handler = None
|
||||
ASGIApp = None
|
||||
|
||||
__version__ = '4.0.0'
|
||||
|
||||
__all__ = ['__version__', 'Server', 'WSGIApp', 'Middleware', 'Client']
|
||||
if AsyncServer is not None: # pragma: no cover
|
||||
__all__ += ['AsyncServer', 'ASGIApp', 'get_tornado_handler',
|
||||
'AsyncClient']
|
127
matteo_env/Lib/site-packages/engineio/async_drivers/aiohttp.py
Normal file
|
@ -0,0 +1,127 @@
|
|||
import asyncio
|
||||
import sys
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
from aiohttp.web import Response, WebSocketResponse
|
||||
|
||||
|
||||
def create_route(app, engineio_server, engineio_endpoint):
|
||||
"""This function sets up the engine.io endpoint as a route for the
|
||||
application.
|
||||
|
||||
Note that both GET and POST requests must be hooked up on the engine.io
|
||||
endpoint.
|
||||
"""
|
||||
app.router.add_get(engineio_endpoint, engineio_server.handle_request)
|
||||
app.router.add_post(engineio_endpoint, engineio_server.handle_request)
|
||||
app.router.add_route('OPTIONS', engineio_endpoint,
|
||||
engineio_server.handle_request)
|
||||
|
||||
|
||||
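An illustrative wiring sketch (not part of this diff); assuming engineio.AsyncServer(async_mode='aiohttp'), its attach() helper is what ends up calling create_route() for the chosen endpoint.

from aiohttp import web
import engineio

eio = engineio.AsyncServer(async_mode='aiohttp')

async def index(request):
    return web.Response(text='engine.io endpoint mounted at /engine.io/', content_type='text/plain')

app = web.Application()
app.router.add_get('/', index)
eio.attach(app)              # registers GET/POST/OPTIONS handlers for /engine.io/
web.run_app(app, port=5000)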
def translate_request(request):
|
||||
"""This function takes the arguments passed to the request handler and
|
||||
uses them to generate a WSGI compatible environ dictionary.
|
||||
"""
|
||||
message = request._message
|
||||
payload = request._payload
|
||||
|
||||
uri_parts = urlsplit(message.path)
|
||||
environ = {
|
||||
'wsgi.input': payload,
|
||||
'wsgi.errors': sys.stderr,
|
||||
'wsgi.version': (1, 0),
|
||||
'wsgi.async': True,
|
||||
'wsgi.multithread': False,
|
||||
'wsgi.multiprocess': False,
|
||||
'wsgi.run_once': False,
|
||||
'SERVER_SOFTWARE': 'aiohttp',
|
||||
'REQUEST_METHOD': message.method,
|
||||
'QUERY_STRING': uri_parts.query or '',
|
||||
'RAW_URI': message.path,
|
||||
'SERVER_PROTOCOL': 'HTTP/%s.%s' % message.version,
|
||||
'REMOTE_ADDR': '127.0.0.1',
|
||||
'REMOTE_PORT': '0',
|
||||
'SERVER_NAME': 'aiohttp',
|
||||
'SERVER_PORT': '0',
|
||||
'aiohttp.request': request
|
||||
}
|
||||
|
||||
for hdr_name, hdr_value in message.headers.items():
|
||||
hdr_name = hdr_name.upper()
|
||||
if hdr_name == 'CONTENT-TYPE':
|
||||
environ['CONTENT_TYPE'] = hdr_value
|
||||
continue
|
||||
elif hdr_name == 'CONTENT-LENGTH':
|
||||
environ['CONTENT_LENGTH'] = hdr_value
|
||||
continue
|
||||
|
||||
key = 'HTTP_%s' % hdr_name.replace('-', '_')
|
||||
if key in environ:
|
||||
hdr_value = '%s,%s' % (environ[key], hdr_value)
|
||||
|
||||
environ[key] = hdr_value
|
||||
|
||||
environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
|
||||
|
||||
path_info = uri_parts.path
|
||||
|
||||
environ['PATH_INFO'] = path_info
|
||||
environ['SCRIPT_NAME'] = ''
|
||||
|
||||
return environ
|
||||
|
||||
|
||||
def make_response(status, headers, payload, environ):
|
||||
"""This function generates an appropriate response object for this async
|
||||
mode.
|
||||
"""
|
||||
return Response(body=payload, status=int(status.split()[0]),
|
||||
headers=headers)
|
||||
|
||||
|
||||
class WebSocket(object): # pragma: no cover
|
||||
"""
|
||||
This wrapper class provides an aiohttp WebSocket interface that is
|
||||
somewhat compatible with eventlet's implementation.
|
||||
"""
|
||||
def __init__(self, handler):
|
||||
self.handler = handler
|
||||
self._sock = None
|
||||
|
||||
async def __call__(self, environ):
|
||||
request = environ['aiohttp.request']
|
||||
self._sock = WebSocketResponse()
|
||||
await self._sock.prepare(request)
|
||||
|
||||
self.environ = environ
|
||||
await self.handler(self)
|
||||
return self._sock
|
||||
|
||||
async def close(self):
|
||||
await self._sock.close()
|
||||
|
||||
async def send(self, message):
|
||||
if isinstance(message, bytes):
|
||||
f = self._sock.send_bytes
|
||||
else:
|
||||
f = self._sock.send_str
|
||||
if asyncio.iscoroutinefunction(f):
|
||||
await f(message)
|
||||
else:
|
||||
f(message)
|
||||
|
||||
async def wait(self):
|
||||
msg = await self._sock.receive()
|
||||
if not isinstance(msg.data, bytes) and \
|
||||
not isinstance(msg.data, str):
|
||||
raise IOError()
|
||||
return msg.data
|
||||
|
||||
|
||||
_async = {
|
||||
'asyncio': True,
|
||||
'create_route': create_route,
|
||||
'translate_request': translate_request,
|
||||
'make_response': make_response,
|
||||
'websocket': WebSocket,
|
||||
}
|
250
matteo_env/Lib/site-packages/engineio/async_drivers/asgi.py
Normal file
|
@ -0,0 +1,250 @@
|
|||
import os
|
||||
import sys
|
||||
import asyncio
|
||||
|
||||
from engineio.static_files import get_static_file
|
||||
|
||||
|
||||
class ASGIApp:
|
||||
"""ASGI application middleware for Engine.IO.
|
||||
|
||||
This middleware dispatches traffic to an Engine.IO application. It can
|
||||
also serve a list of static files to the client, or forward unrelated
|
||||
HTTP traffic to another ASGI application.
|
||||
|
||||
:param engineio_server: The Engine.IO server. Must be an instance of the
|
||||
``engineio.AsyncServer`` class.
|
||||
:param static_files: A dictionary with static file mapping rules. See the
|
||||
documentation for details on this argument.
|
||||
:param other_asgi_app: A separate ASGI app that receives all other traffic.
|
||||
:param engineio_path: The endpoint where the Engine.IO application should
|
||||
be installed. The default value is appropriate for
|
||||
most cases.
|
||||
:param on_startup: function to be called on application startup; can be
|
||||
coroutine
|
||||
:param on_shutdown: function to be called on application shutdown; can be
|
||||
coroutine
|
||||
|
||||
Example usage::
|
||||
|
||||
import engineio
|
||||
import uvicorn
|
||||
|
||||
eio = engineio.AsyncServer()
|
||||
app = engineio.ASGIApp(eio, static_files={
|
||||
'/': {'content_type': 'text/html', 'filename': 'index.html'},
|
||||
'/index.html': {'content_type': 'text/html',
|
||||
'filename': 'index.html'},
|
||||
})
|
||||
uvicorn.run(app, '127.0.0.1', 5000)
|
||||
"""
|
||||
def __init__(self, engineio_server, other_asgi_app=None,
|
||||
static_files=None, engineio_path='engine.io',
|
||||
on_startup=None, on_shutdown=None):
|
||||
self.engineio_server = engineio_server
|
||||
self.other_asgi_app = other_asgi_app
|
||||
self.engineio_path = engineio_path.strip('/')
|
||||
self.static_files = static_files or {}
|
||||
self.on_startup = on_startup
|
||||
self.on_shutdown = on_shutdown
|
||||
|
||||
async def __call__(self, scope, receive, send):
|
||||
if scope['type'] in ['http', 'websocket'] and \
|
||||
scope['path'].startswith('/{0}/'.format(self.engineio_path)):
|
||||
await self.engineio_server.handle_request(scope, receive, send)
|
||||
else:
|
||||
static_file = get_static_file(scope['path'], self.static_files) \
|
||||
if scope['type'] == 'http' and self.static_files else None
|
||||
if static_file:
|
||||
await self.serve_static_file(static_file, receive, send)
|
||||
elif self.other_asgi_app is not None:
|
||||
await self.other_asgi_app(scope, receive, send)
|
||||
elif scope['type'] == 'lifespan':
|
||||
await self.lifespan(receive, send)
|
||||
else:
|
||||
await self.not_found(receive, send)
|
||||
|
||||
async def serve_static_file(self, static_file, receive,
|
||||
send): # pragma: no cover
|
||||
event = await receive()
|
||||
if event['type'] == 'http.request':
|
||||
if os.path.exists(static_file['filename']):
|
||||
with open(static_file['filename'], 'rb') as f:
|
||||
payload = f.read()
|
||||
await send({'type': 'http.response.start',
|
||||
'status': 200,
|
||||
'headers': [(b'Content-Type', static_file[
|
||||
'content_type'].encode('utf-8'))]})
|
||||
await send({'type': 'http.response.body',
|
||||
'body': payload})
|
||||
else:
|
||||
await self.not_found(receive, send)
|
||||
|
||||
async def lifespan(self, receive, send):
|
||||
while True:
|
||||
event = await receive()
|
||||
if event['type'] == 'lifespan.startup':
|
||||
if self.on_startup:
|
||||
try:
|
||||
await self.on_startup() \
|
||||
if asyncio.iscoroutinefunction(self.on_startup) \
|
||||
else self.on_startup()
|
||||
except:
|
||||
await send({'type': 'lifespan.startup.failed'})
|
||||
return
|
||||
await send({'type': 'lifespan.startup.complete'})
|
||||
elif event['type'] == 'lifespan.shutdown':
|
||||
if self.on_shutdown:
|
||||
try:
|
||||
await self.on_shutdown() \
|
||||
if asyncio.iscoroutinefunction(self.on_shutdown) \
|
||||
else self.on_shutdown()
|
||||
except:
|
||||
await send({'type': 'lifespan.shutdown.failed'})
|
||||
return
|
||||
await send({'type': 'lifespan.shutdown.complete'})
|
||||
return
|
||||
|
||||
async def not_found(self, receive, send):
|
||||
"""Return a 404 Not Found error to the client."""
|
||||
await send({'type': 'http.response.start',
|
||||
'status': 404,
|
||||
'headers': [(b'Content-Type', b'text/plain')]})
|
||||
await send({'type': 'http.response.body',
|
||||
'body': b'Not Found'})
|
||||
|
||||
|
||||
async def translate_request(scope, receive, send):
|
||||
class AwaitablePayload(object): # pragma: no cover
|
||||
def __init__(self, payload):
|
||||
self.payload = payload or b''
|
||||
|
||||
async def read(self, length=None):
|
||||
if length is None:
|
||||
r = self.payload
|
||||
self.payload = b''
|
||||
else:
|
||||
r = self.payload[:length]
|
||||
self.payload = self.payload[length:]
|
||||
return r
|
||||
|
||||
event = await receive()
|
||||
payload = b''
|
||||
if event['type'] == 'http.request':
|
||||
payload += event.get('body') or b''
|
||||
while event.get('more_body'):
|
||||
event = await receive()
|
||||
if event['type'] == 'http.request':
|
||||
payload += event.get('body') or b''
|
||||
elif event['type'] == 'websocket.connect':
|
||||
pass
|
||||
else:
|
||||
return {}
|
||||
|
||||
raw_uri = scope['path'].encode('utf-8')
|
||||
if 'query_string' in scope and scope['query_string']:
|
||||
raw_uri += b'?' + scope['query_string']
|
||||
environ = {
|
||||
'wsgi.input': AwaitablePayload(payload),
|
||||
'wsgi.errors': sys.stderr,
|
||||
'wsgi.version': (1, 0),
|
||||
'wsgi.async': True,
|
||||
'wsgi.multithread': False,
|
||||
'wsgi.multiprocess': False,
|
||||
'wsgi.run_once': False,
|
||||
'SERVER_SOFTWARE': 'asgi',
|
||||
'REQUEST_METHOD': scope.get('method', 'GET'),
|
||||
'PATH_INFO': scope['path'],
|
||||
'QUERY_STRING': scope.get('query_string', b'').decode('utf-8'),
|
||||
'RAW_URI': raw_uri.decode('utf-8'),
|
||||
'SCRIPT_NAME': '',
|
||||
'SERVER_PROTOCOL': 'HTTP/1.1',
|
||||
'REMOTE_ADDR': '127.0.0.1',
|
||||
'REMOTE_PORT': '0',
|
||||
'SERVER_NAME': 'asgi',
|
||||
'SERVER_PORT': '0',
|
||||
'asgi.receive': receive,
|
||||
'asgi.send': send,
|
||||
'asgi.scope': scope,
|
||||
}
|
||||
|
||||
for hdr_name, hdr_value in scope['headers']:
|
||||
hdr_name = hdr_name.upper().decode('utf-8')
|
||||
hdr_value = hdr_value.decode('utf-8')
|
||||
if hdr_name == 'CONTENT-TYPE':
|
||||
environ['CONTENT_TYPE'] = hdr_value
|
||||
continue
|
||||
elif hdr_name == 'CONTENT-LENGTH':
|
||||
environ['CONTENT_LENGTH'] = hdr_value
|
||||
continue
|
||||
|
||||
key = 'HTTP_%s' % hdr_name.replace('-', '_')
|
||||
if key in environ:
|
||||
hdr_value = '%s,%s' % (environ[key], hdr_value)
|
||||
|
||||
environ[key] = hdr_value
|
||||
|
||||
environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
|
||||
return environ
|
||||
|
||||
|
||||
async def make_response(status, headers, payload, environ):
|
||||
headers = [(h[0].encode('utf-8'), h[1].encode('utf-8')) for h in headers]
|
||||
if 'HTTP_SEC_WEBSOCKET_VERSION' in environ:
|
||||
if status.startswith('200 '):
|
||||
await environ['asgi.send']({'type': 'websocket.accept',
|
||||
'headers': headers})
|
||||
else:
|
||||
await environ['asgi.send']({'type': 'websocket.close'})
|
||||
return
|
||||
|
||||
await environ['asgi.send']({'type': 'http.response.start',
|
||||
'status': int(status.split(' ')[0]),
|
||||
'headers': headers})
|
||||
await environ['asgi.send']({'type': 'http.response.body',
|
||||
'body': payload})
|
||||
|
||||
|
||||
class WebSocket(object): # pragma: no cover
|
||||
"""
|
||||
This wrapper class provides an asgi WebSocket interface that is
|
||||
somewhat compatible with eventlet's implementation.
|
||||
"""
|
||||
def __init__(self, handler):
|
||||
self.handler = handler
|
||||
self.asgi_receive = None
|
||||
self.asgi_send = None
|
||||
|
||||
async def __call__(self, environ):
|
||||
self.asgi_receive = environ['asgi.receive']
|
||||
self.asgi_send = environ['asgi.send']
|
||||
await self.asgi_send({'type': 'websocket.accept'})
|
||||
await self.handler(self)
|
||||
|
||||
async def close(self):
|
||||
await self.asgi_send({'type': 'websocket.close'})
|
||||
|
||||
async def send(self, message):
|
||||
msg_bytes = None
|
||||
msg_text = None
|
||||
if isinstance(message, bytes):
|
||||
msg_bytes = message
|
||||
else:
|
||||
msg_text = message
|
||||
await self.asgi_send({'type': 'websocket.send',
|
||||
'bytes': msg_bytes,
|
||||
'text': msg_text})
|
||||
|
||||
async def wait(self):
|
||||
event = await self.asgi_receive()
|
||||
if event['type'] != 'websocket.receive':
|
||||
raise IOError()
|
||||
return event.get('bytes') or event.get('text')
|
||||
|
||||
|
||||
_async = {
|
||||
'asyncio': True,
|
||||
'translate_request': translate_request,
|
||||
'make_response': make_response,
|
||||
'websocket': WebSocket,
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from eventlet.green.threading import Thread, Event
|
||||
from eventlet import queue
|
||||
from eventlet import sleep
|
||||
from eventlet.websocket import WebSocketWSGI as _WebSocketWSGI
|
||||
|
||||
|
||||
class WebSocketWSGI(_WebSocketWSGI):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(WebSocketWSGI, self).__init__(*args, **kwargs)
|
||||
self._sock = None
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
if 'eventlet.input' not in environ:
|
||||
raise RuntimeError('You need to use the eventlet server. '
|
||||
'See the Deployment section of the '
|
||||
'documentation for more information.')
|
||||
self._sock = environ['eventlet.input'].get_socket()
|
||||
return super(WebSocketWSGI, self).__call__(environ, start_response)
|
||||
|
||||
|
||||
_async = {
|
||||
'thread': Thread,
|
||||
'queue': queue.Queue,
|
||||
'queue_empty': queue.Empty,
|
||||
'event': Event,
|
||||
'websocket': WebSocketWSGI,
|
||||
'sleep': sleep,
|
||||
}
|
|
@ -0,0 +1,63 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import gevent
|
||||
from gevent import queue
|
||||
from gevent.event import Event
|
||||
try:
|
||||
import geventwebsocket # noqa
|
||||
_websocket_available = True
|
||||
except ImportError:
|
||||
_websocket_available = False
|
||||
|
||||
|
||||
class Thread(gevent.Greenlet): # pragma: no cover
|
||||
"""
|
||||
This wrapper class provides a gevent Greenlet interface that is compatible
|
||||
with the standard library's Thread class.
|
||||
"""
|
||||
def __init__(self, target, args=[], kwargs={}):
|
||||
super(Thread, self).__init__(target, *args, **kwargs)
|
||||
|
||||
def _run(self):
|
||||
return self.run()
|
||||
|
||||
|
||||
class WebSocketWSGI(object): # pragma: no cover
|
||||
"""
|
||||
This wrapper class provides a gevent WebSocket interface that is
|
||||
compatible with eventlet's implementation.
|
||||
"""
|
||||
def __init__(self, app):
|
||||
self.app = app
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
if 'wsgi.websocket' not in environ:
|
||||
raise RuntimeError('You need to use the gevent-websocket server. '
|
||||
'See the Deployment section of the '
|
||||
'documentation for more information.')
|
||||
self._sock = environ['wsgi.websocket']
|
||||
self.environ = environ
|
||||
self.version = self._sock.version
|
||||
self.path = self._sock.path
|
||||
self.origin = self._sock.origin
|
||||
self.protocol = self._sock.protocol
|
||||
return self.app(self)
|
||||
|
||||
def close(self):
|
||||
return self._sock.close()
|
||||
|
||||
def send(self, message):
|
||||
return self._sock.send(message)
|
||||
|
||||
def wait(self):
|
||||
return self._sock.receive()
|
||||
|
||||
|
||||
_async = {
|
||||
'thread': Thread,
|
||||
'queue': queue.JoinableQueue,
|
||||
'queue_empty': queue.Empty,
|
||||
'event': Event,
|
||||
'websocket': WebSocketWSGI if _websocket_available else None,
|
||||
'sleep': gevent.sleep,
|
||||
}
|
|
@ -0,0 +1,154 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import gevent
|
||||
from gevent import queue
|
||||
from gevent.event import Event
|
||||
import uwsgi
|
||||
_websocket_available = hasattr(uwsgi, 'websocket_handshake')
|
||||
|
||||
|
||||
class Thread(gevent.Greenlet): # pragma: no cover
|
||||
"""
|
||||
This wrapper class provides a gevent Greenlet interface that is compatible
|
||||
with the standard library's Thread class.
|
||||
"""
|
||||
def __init__(self, target, args=[], kwargs={}):
|
||||
super(Thread, self).__init__(target, *args, **kwargs)
|
||||
|
||||
def _run(self):
|
||||
return self.run()
|
||||
|
||||
|
||||
class uWSGIWebSocket(object): # pragma: no cover
|
||||
"""
|
||||
This wrapper class provides a uWSGI WebSocket interface that is
|
||||
compatible with eventlet's implementation.
|
||||
"""
|
||||
def __init__(self, app):
|
||||
self.app = app
|
||||
self._sock = None
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
self._sock = uwsgi.connection_fd()
|
||||
self.environ = environ
|
||||
|
||||
uwsgi.websocket_handshake()
|
||||
|
||||
self._req_ctx = None
|
||||
if hasattr(uwsgi, 'request_context'):
|
||||
# uWSGI >= 2.1.x with support for api access across-greenlets
|
||||
self._req_ctx = uwsgi.request_context()
|
||||
else:
|
||||
# use event and queue for sending messages
|
||||
from gevent.event import Event
|
||||
from gevent.queue import Queue
|
||||
from gevent.select import select
|
||||
self._event = Event()
|
||||
self._send_queue = Queue()
|
||||
|
||||
# spawn a select greenlet
|
||||
def select_greenlet_runner(fd, event):
|
||||
"""Sets event when data becomes available to read on fd."""
|
||||
while True:
|
||||
event.set()
|
||||
try:
|
||||
select([fd], [], [])[0]
|
||||
except ValueError:
|
||||
break
|
||||
self._select_greenlet = gevent.spawn(
|
||||
select_greenlet_runner,
|
||||
self._sock,
|
||||
self._event)
|
||||
|
||||
self.app(self)
|
||||
|
||||
def close(self):
|
||||
"""Disconnects uWSGI from the client."""
|
||||
uwsgi.disconnect()
|
||||
if self._req_ctx is None:
|
||||
# better kill it here in case wait() is not called again
|
||||
self._select_greenlet.kill()
|
||||
self._event.set()
|
||||
|
||||
def _send(self, msg):
|
||||
"""Transmits message either in binary or UTF-8 text mode,
|
||||
depending on its type."""
|
||||
if isinstance(msg, bytes):
|
||||
method = uwsgi.websocket_send_binary
|
||||
else:
|
||||
method = uwsgi.websocket_send
|
||||
if self._req_ctx is not None:
|
||||
method(msg, request_context=self._req_ctx)
|
||||
else:
|
||||
method(msg)
|
||||
|
||||
def _decode_received(self, msg):
|
||||
"""Returns either bytes or str, depending on message type."""
|
||||
if not isinstance(msg, bytes):
|
||||
# already decoded - do nothing
|
||||
return msg
|
||||
# only decode from utf-8 if message is not binary data
|
||||
type = ord(msg[0:1])
|
||||
if type >= 48: # no binary
|
||||
return msg.decode('utf-8')
|
||||
# binary message, don't try to decode
|
||||
return msg
|
||||
|
||||
def send(self, msg):
|
||||
"""Queues a message for sending. Real transmission is done in
|
||||
wait method.
|
||||
Sends directly if uWSGI version is new enough."""
|
||||
if self._req_ctx is not None:
|
||||
self._send(msg)
|
||||
else:
|
||||
self._send_queue.put(msg)
|
||||
self._event.set()
|
||||
|
||||
def wait(self):
|
||||
"""Waits and returns received messages.
|
||||
If running in compatibility mode for older uWSGI versions,
|
||||
it also sends messages that have been queued by send().
|
||||
A return value of None means that connection was closed.
|
||||
This must be called repeatedly. For uWSGI < 2.1.x it must
|
||||
be called from the main greenlet."""
|
||||
while True:
|
||||
if self._req_ctx is not None:
|
||||
try:
|
||||
msg = uwsgi.websocket_recv(request_context=self._req_ctx)
|
||||
except IOError: # connection closed
|
||||
return None
|
||||
return self._decode_received(msg)
|
||||
else:
|
||||
# we wake up at least every 3 seconds to let uWSGI
|
||||
# do its ping/ponging
|
||||
event_set = self._event.wait(timeout=3)
|
||||
if event_set:
|
||||
self._event.clear()
|
||||
# maybe there is something to send
|
||||
msgs = []
|
||||
while True:
|
||||
try:
|
||||
msgs.append(self._send_queue.get(block=False))
|
||||
except gevent.queue.Empty:
|
||||
break
|
||||
for msg in msgs:
|
||||
self._send(msg)
|
||||
# maybe there is something to receive, if not, at least
|
||||
# ensure uWSGI does its ping/ponging
|
||||
try:
|
||||
msg = uwsgi.websocket_recv_nb()
|
||||
except IOError: # connection closed
|
||||
self._select_greenlet.kill()
|
||||
return None
|
||||
if msg: # message available
|
||||
return self._decode_received(msg)
|
||||
|
||||
|
||||
_async = {
|
||||
'thread': Thread,
|
||||
'queue': queue.JoinableQueue,
|
||||
'queue_empty': queue.Empty,
|
||||
'event': Event,
|
||||
'websocket': uWSGIWebSocket if _websocket_available else None,
|
||||
'sleep': gevent.sleep,
|
||||
}
|
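The uWSGI variant above is selected the same way, but it only works when uWSGI itself provides the gevent loop and websocket support (hence the `hasattr(uwsgi, 'websocket_handshake')` check). A sketch, with the uWSGI invocation shown only as an assumed typical setup:

    import engineio

    eio = engineio.Server(async_mode='gevent_uwsgi')
    app = engineio.WSGIApp(eio)
    # e.g. run under: uwsgi --http :5000 --gevent 1000 --http-websockets --wsgi-file app.py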
143
matteo_env/Lib/site-packages/engineio/async_drivers/sanic.py
Normal file
|
@ -0,0 +1,143 @@
|
|||
import sys
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
try: # pragma: no cover
|
||||
from sanic.response import HTTPResponse
|
||||
from sanic.websocket import WebSocketProtocol
|
||||
except ImportError:
|
||||
HTTPResponse = None
|
||||
WebSocketProtocol = None
|
||||
|
||||
|
||||
def create_route(app, engineio_server, engineio_endpoint): # pragma: no cover
|
||||
"""This function sets up the engine.io endpoint as a route for the
|
||||
application.
|
||||
|
||||
Note that both GET and POST requests must be hooked up on the engine.io
|
||||
endpoint.
|
||||
"""
|
||||
app.add_route(engineio_server.handle_request, engineio_endpoint,
|
||||
methods=['GET', 'POST', 'OPTIONS'])
|
||||
try:
|
||||
app.enable_websocket()
|
||||
except AttributeError:
|
||||
# ignore, this version does not support websocket
|
||||
pass
|
||||
|
||||
|
||||
def translate_request(request): # pragma: no cover
|
||||
"""This function takes the arguments passed to the request handler and
|
||||
uses them to generate a WSGI compatible environ dictionary.
|
||||
"""
|
||||
class AwaitablePayload(object):
|
||||
def __init__(self, payload):
|
||||
self.payload = payload or b''
|
||||
|
||||
async def read(self, length=None):
|
||||
if length is None:
|
||||
r = self.payload
|
||||
self.payload = b''
|
||||
else:
|
||||
r = self.payload[:length]
|
||||
self.payload = self.payload[length:]
|
||||
return r
|
||||
|
||||
uri_parts = urlsplit(request.url)
|
||||
environ = {
|
||||
'wsgi.input': AwaitablePayload(request.body),
|
||||
'wsgi.errors': sys.stderr,
|
||||
'wsgi.version': (1, 0),
|
||||
'wsgi.async': True,
|
||||
'wsgi.multithread': False,
|
||||
'wsgi.multiprocess': False,
|
||||
'wsgi.run_once': False,
|
||||
'SERVER_SOFTWARE': 'sanic',
|
||||
'REQUEST_METHOD': request.method,
|
||||
'QUERY_STRING': uri_parts.query or '',
|
||||
'RAW_URI': request.url,
|
||||
'SERVER_PROTOCOL': 'HTTP/' + request.version,
|
||||
'REMOTE_ADDR': '127.0.0.1',
|
||||
'REMOTE_PORT': '0',
|
||||
'SERVER_NAME': 'sanic',
|
||||
'SERVER_PORT': '0',
|
||||
'sanic.request': request
|
||||
}
|
||||
|
||||
for hdr_name, hdr_value in request.headers.items():
|
||||
hdr_name = hdr_name.upper()
|
||||
if hdr_name == 'CONTENT-TYPE':
|
||||
environ['CONTENT_TYPE'] = hdr_value
|
||||
continue
|
||||
elif hdr_name == 'CONTENT-LENGTH':
|
||||
environ['CONTENT_LENGTH'] = hdr_value
|
||||
continue
|
||||
|
||||
key = 'HTTP_%s' % hdr_name.replace('-', '_')
|
||||
if key in environ:
|
||||
hdr_value = '%s,%s' % (environ[key], hdr_value)
|
||||
|
||||
environ[key] = hdr_value
|
||||
|
||||
environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
|
||||
|
||||
path_info = uri_parts.path
|
||||
|
||||
environ['PATH_INFO'] = path_info
|
||||
environ['SCRIPT_NAME'] = ''
|
||||
|
||||
return environ
|
||||
|
||||
|
||||
def make_response(status, headers, payload, environ): # pragma: no cover
|
||||
"""This function generates an appropriate response object for this async
|
||||
mode.
|
||||
"""
|
||||
headers_dict = {}
|
||||
content_type = None
|
||||
for h in headers:
|
||||
if h[0].lower() == 'content-type':
|
||||
content_type = h[1]
|
||||
else:
|
||||
headers_dict[h[0]] = h[1]
|
||||
return HTTPResponse(body_bytes=payload, content_type=content_type,
|
||||
status=int(status.split()[0]), headers=headers_dict)
|
||||
|
||||
|
||||
class WebSocket(object): # pragma: no cover
|
||||
"""
|
||||
This wrapper class provides a sanic WebSocket interface that is
|
||||
somewhat compatible with eventlet's implementation.
|
||||
"""
|
||||
def __init__(self, handler):
|
||||
self.handler = handler
|
||||
self._sock = None
|
||||
|
||||
async def __call__(self, environ):
|
||||
request = environ['sanic.request']
|
||||
protocol = request.transport.get_protocol()
|
||||
self._sock = await protocol.websocket_handshake(request)
|
||||
|
||||
self.environ = environ
|
||||
await self.handler(self)
|
||||
|
||||
async def close(self):
|
||||
await self._sock.close()
|
||||
|
||||
async def send(self, message):
|
||||
await self._sock.send(message)
|
||||
|
||||
async def wait(self):
|
||||
data = await self._sock.recv()
|
||||
if not isinstance(data, bytes) and \
|
||||
not isinstance(data, str):
|
||||
raise IOError()
|
||||
return data
|
||||
|
||||
|
||||
_async = {
|
||||
'asyncio': True,
|
||||
'create_route': create_route,
|
||||
'translate_request': translate_request,
|
||||
'make_response': make_response,
|
||||
'websocket': WebSocket if WebSocketProtocol else None,
|
||||
}
|
|
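A sketch of how the Sanic driver above gets wired in: attach() on the server calls this module's create_route() to register the GET/POST/OPTIONS endpoint. AsyncServer and attach() are the documented python-engineio entry points; the Sanic app itself is illustrative.

    import engineio
    from sanic import Sanic

    app = Sanic('example')
    eio = engineio.AsyncServer(async_mode='sanic')
    eio.attach(app)  # registers the engine.io route via create_route() above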
@ -0,0 +1,17 @@
from __future__ import absolute_import
import threading
import time

try:
    import queue
except ImportError:  # pragma: no cover
    import Queue as queue

_async = {
    'thread': threading.Thread,
    'queue': queue.Queue,
    'queue_empty': queue.Empty,
    'event': threading.Event,
    'websocket': None,
    'sleep': time.sleep,
}
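This threading driver is the standard-library fallback: plain threads and queues, with no WebSocket transport (note 'websocket': None). A minimal sketch of selecting it explicitly:

    import engineio

    eio = engineio.Server(async_mode='threading')  # long-polling only
    app = engineio.WSGIApp(eio)                    # serve with any WSGI server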
182
matteo_env/Lib/site-packages/engineio/async_drivers/tornado.py
Normal file
|
@ -0,0 +1,182 @@
|
|||
import asyncio
|
||||
import sys
|
||||
from urllib.parse import urlsplit
|
||||
from .. import exceptions
|
||||
|
||||
import tornado.web
|
||||
import tornado.websocket
|
||||
|
||||
|
||||
def get_tornado_handler(engineio_server):
|
||||
class Handler(tornado.websocket.WebSocketHandler): # pragma: no cover
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if isinstance(engineio_server.cors_allowed_origins, str):
|
||||
if engineio_server.cors_allowed_origins == '*':
|
||||
self.allowed_origins = None
|
||||
else:
|
||||
self.allowed_origins = [
|
||||
engineio_server.cors_allowed_origins]
|
||||
else:
|
||||
self.allowed_origins = engineio_server.cors_allowed_origins
|
||||
self.receive_queue = asyncio.Queue()
|
||||
|
||||
async def get(self, *args, **kwargs):
|
||||
if self.request.headers.get('Upgrade', '').lower() == 'websocket':
|
||||
ret = super().get(*args, **kwargs)
|
||||
if asyncio.iscoroutine(ret):
|
||||
await ret
|
||||
else:
|
||||
await engineio_server.handle_request(self)
|
||||
|
||||
async def open(self, *args, **kwargs):
|
||||
# this is the handler for the websocket request
|
||||
asyncio.ensure_future(engineio_server.handle_request(self))
|
||||
|
||||
async def post(self, *args, **kwargs):
|
||||
await engineio_server.handle_request(self)
|
||||
|
||||
async def options(self, *args, **kwargs):
|
||||
await engineio_server.handle_request(self)
|
||||
|
||||
async def on_message(self, message):
|
||||
await self.receive_queue.put(message)
|
||||
|
||||
async def get_next_message(self):
|
||||
return await self.receive_queue.get()
|
||||
|
||||
def on_close(self):
|
||||
self.receive_queue.put_nowait(None)
|
||||
|
||||
def check_origin(self, origin):
|
||||
if self.allowed_origins is None or origin in self.allowed_origins:
|
||||
return True
|
||||
return super().check_origin(origin)
|
||||
|
||||
def get_compression_options(self):
|
||||
# enable compression
|
||||
return {}
|
||||
|
||||
return Handler
|
||||
|
||||
|
||||
def translate_request(handler):
|
||||
"""This function takes the arguments passed to the request handler and
|
||||
uses them to generate a WSGI compatible environ dictionary.
|
||||
"""
|
||||
class AwaitablePayload(object):
|
||||
def __init__(self, payload):
|
||||
self.payload = payload or b''
|
||||
|
||||
async def read(self, length=None):
|
||||
if length is None:
|
||||
r = self.payload
|
||||
self.payload = b''
|
||||
else:
|
||||
r = self.payload[:length]
|
||||
self.payload = self.payload[length:]
|
||||
return r
|
||||
|
||||
payload = handler.request.body
|
||||
|
||||
uri_parts = urlsplit(handler.request.path)
|
||||
full_uri = handler.request.path
|
||||
if handler.request.query: # pragma: no cover
|
||||
full_uri += '?' + handler.request.query
|
||||
environ = {
|
||||
'wsgi.input': AwaitablePayload(payload),
|
||||
'wsgi.errors': sys.stderr,
|
||||
'wsgi.version': (1, 0),
|
||||
'wsgi.async': True,
|
||||
'wsgi.multithread': False,
|
||||
'wsgi.multiprocess': False,
|
||||
'wsgi.run_once': False,
|
||||
'SERVER_SOFTWARE': 'aiohttp',
|
||||
'REQUEST_METHOD': handler.request.method,
|
||||
'QUERY_STRING': handler.request.query or '',
|
||||
'RAW_URI': full_uri,
|
||||
'SERVER_PROTOCOL': 'HTTP/%s' % handler.request.version,
|
||||
'REMOTE_ADDR': '127.0.0.1',
|
||||
'REMOTE_PORT': '0',
|
||||
'SERVER_NAME': 'aiohttp',
|
||||
'SERVER_PORT': '0',
|
||||
'tornado.handler': handler
|
||||
}
|
||||
|
||||
for hdr_name, hdr_value in handler.request.headers.items():
|
||||
hdr_name = hdr_name.upper()
|
||||
if hdr_name == 'CONTENT-TYPE':
|
||||
environ['CONTENT_TYPE'] = hdr_value
|
||||
continue
|
||||
elif hdr_name == 'CONTENT-LENGTH':
|
||||
environ['CONTENT_LENGTH'] = hdr_value
|
||||
continue
|
||||
|
||||
key = 'HTTP_%s' % hdr_name.replace('-', '_')
|
||||
environ[key] = hdr_value
|
||||
|
||||
environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
|
||||
|
||||
path_info = uri_parts.path
|
||||
|
||||
environ['PATH_INFO'] = path_info
|
||||
environ['SCRIPT_NAME'] = ''
|
||||
|
||||
return environ
|
||||
|
||||
|
||||
def make_response(status, headers, payload, environ):
|
||||
"""This function generates an appropriate response object for this async
|
||||
mode.
|
||||
"""
|
||||
tornado_handler = environ['tornado.handler']
|
||||
try:
|
||||
tornado_handler.set_status(int(status.split()[0]))
|
||||
except RuntimeError: # pragma: no cover
|
||||
# for websocket connections Tornado does not accept a response, since
|
||||
# it already emitted the 101 status code
|
||||
return
|
||||
for header, value in headers:
|
||||
tornado_handler.set_header(header, value)
|
||||
tornado_handler.write(payload)
|
||||
tornado_handler.finish()
|
||||
|
||||
|
||||
class WebSocket(object): # pragma: no cover
|
||||
"""
|
||||
This wrapper class provides a tornado WebSocket interface that is
|
||||
somewhat compatible with eventlet's implementation.
|
||||
"""
|
||||
def __init__(self, handler):
|
||||
self.handler = handler
|
||||
self.tornado_handler = None
|
||||
|
||||
async def __call__(self, environ):
|
||||
self.tornado_handler = environ['tornado.handler']
|
||||
self.environ = environ
|
||||
await self.handler(self)
|
||||
|
||||
async def close(self):
|
||||
self.tornado_handler.close()
|
||||
|
||||
async def send(self, message):
|
||||
try:
|
||||
self.tornado_handler.write_message(
|
||||
message, binary=isinstance(message, bytes))
|
||||
except tornado.websocket.WebSocketClosedError:
|
||||
raise exceptions.EngineIOError()
|
||||
|
||||
async def wait(self):
|
||||
msg = await self.tornado_handler.get_next_message()
|
||||
if not isinstance(msg, bytes) and \
|
||||
not isinstance(msg, str):
|
||||
raise IOError()
|
||||
return msg
|
||||
|
||||
|
||||
_async = {
|
||||
'asyncio': True,
|
||||
'translate_request': translate_request,
|
||||
'make_response': make_response,
|
||||
'websocket': WebSocket,
|
||||
}
|
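A sketch of using the Tornado driver above: get_tornado_handler() builds the request handler class that routes both HTTP and WebSocket traffic into the engine.io server. This assumes the package-level re-export of get_tornado_handler; the URL pattern and port are illustrative.

    import tornado.web
    import engineio

    eio = engineio.AsyncServer(async_mode='tornado')
    app = tornado.web.Application(
        [(r'/engine.io/', engineio.get_tornado_handler(eio))])
    app.listen(5000)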
608
matteo_env/Lib/site-packages/engineio/asyncio_client.py
Normal file
|
@ -0,0 +1,608 @@
|
|||
import asyncio
|
||||
import signal
|
||||
import ssl
|
||||
import threading
|
||||
|
||||
try:
|
||||
import aiohttp
|
||||
except ImportError: # pragma: no cover
|
||||
aiohttp = None
|
||||
|
||||
from . import client
|
||||
from . import exceptions
|
||||
from . import packet
|
||||
from . import payload
|
||||
|
||||
async_signal_handler_set = False
|
||||
|
||||
|
||||
def async_signal_handler():
|
||||
"""SIGINT handler.
|
||||
|
||||
Disconnect all active async clients.
|
||||
"""
|
||||
async def _handler():
|
||||
asyncio.get_event_loop().stop()
|
||||
for c in client.connected_clients[:]:
|
||||
if c.is_asyncio_based():
|
||||
await c.disconnect()
|
||||
else: # pragma: no cover
|
||||
pass
|
||||
|
||||
asyncio.ensure_future(_handler())
|
||||
|
||||
|
||||
class AsyncClient(client.Client):
|
||||
"""An Engine.IO client for asyncio.
|
||||
|
||||
This class implements a fully compliant Engine.IO web client with support
|
||||
for websocket and long-polling transports, compatible with the asyncio
|
||||
framework on Python 3.5 or newer.
|
||||
|
||||
:param logger: To enable logging set to ``True`` or pass a logger object to
|
||||
use. To disable logging set to ``False``. The default is
|
||||
``False``. Note that fatal errors are logged even when
|
||||
``logger`` is ``False``.
|
||||
:param json: An alternative json module to use for encoding and decoding
|
||||
packets. Custom json modules must have ``dumps`` and ``loads``
|
||||
functions that are compatible with the standard library
|
||||
versions.
|
||||
:param request_timeout: A timeout in seconds for requests. The default is
|
||||
5 seconds.
|
||||
:param http_session: an initialized ``aiohttp.ClientSession`` object to be
|
||||
used when sending requests to the server. Use it if
|
||||
you need to add special client options such as proxy
|
||||
servers, SSL certificates, etc.
|
||||
:param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to
|
||||
skip SSL certificate verification, allowing
|
||||
connections to servers with self signed certificates.
|
||||
The default is ``True``.
|
||||
"""
|
||||
def is_asyncio_based(self):
|
||||
return True
|
||||
|
||||
async def connect(self, url, headers=None, transports=None,
|
||||
engineio_path='engine.io'):
|
||||
"""Connect to an Engine.IO server.
|
||||
|
||||
:param url: The URL of the Engine.IO server. It can include custom
|
||||
query string parameters if required by the server.
|
||||
:param headers: A dictionary with custom headers to send with the
|
||||
connection request.
|
||||
:param transports: The list of allowed transports. Valid transports
|
||||
are ``'polling'`` and ``'websocket'``. If not
|
||||
given, the polling transport is connected first,
|
||||
then an upgrade to websocket is attempted.
|
||||
:param engineio_path: The endpoint where the Engine.IO server is
|
||||
installed. The default value is appropriate for
|
||||
most cases.
|
||||
|
||||
Note: this method is a coroutine.
|
||||
|
||||
Example usage::
|
||||
|
||||
eio = engineio.Client()
|
||||
await eio.connect('http://localhost:5000')
|
||||
"""
|
||||
global async_signal_handler_set
|
||||
if not async_signal_handler_set and \
|
||||
threading.current_thread() == threading.main_thread():
|
||||
|
||||
try:
|
||||
asyncio.get_event_loop().add_signal_handler(
|
||||
signal.SIGINT, async_signal_handler)
|
||||
async_signal_handler_set = True
|
||||
except NotImplementedError: # pragma: no cover
|
||||
self.logger.warning('Signal handler is unsupported')
|
||||
|
||||
if self.state != 'disconnected':
|
||||
raise ValueError('Client is not in a disconnected state')
|
||||
valid_transports = ['polling', 'websocket']
|
||||
if transports is not None:
|
||||
if isinstance(transports, str):
|
||||
transports = [transports]
|
||||
transports = [transport for transport in transports
|
||||
if transport in valid_transports]
|
||||
if not transports:
|
||||
raise ValueError('No valid transports provided')
|
||||
self.transports = transports or valid_transports
|
||||
self.queue = self.create_queue()
|
||||
return await getattr(self, '_connect_' + self.transports[0])(
|
||||
url, headers or {}, engineio_path)
|
||||
|
||||
async def wait(self):
|
||||
"""Wait until the connection with the server ends.
|
||||
|
||||
Client applications can use this function to block the main thread
|
||||
during the life of the connection.
|
||||
|
||||
Note: this method is a coroutine.
|
||||
"""
|
||||
if self.read_loop_task:
|
||||
await self.read_loop_task
|
||||
|
||||
async def send(self, data):
|
||||
"""Send a message to a client.
|
||||
|
||||
:param data: The data to send to the client. Data can be of type
|
||||
``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
|
||||
or ``dict``, the data will be serialized as JSON.
|
||||
|
||||
Note: this method is a coroutine.
|
||||
"""
|
||||
await self._send_packet(packet.Packet(packet.MESSAGE, data=data))
|
||||
|
||||
async def disconnect(self, abort=False):
|
||||
"""Disconnect from the server.
|
||||
|
||||
:param abort: If set to ``True``, do not wait for background tasks
|
||||
associated with the connection to end.
|
||||
|
||||
Note: this method is a coroutine.
|
||||
"""
|
||||
if self.state == 'connected':
|
||||
await self._send_packet(packet.Packet(packet.CLOSE))
|
||||
await self.queue.put(None)
|
||||
self.state = 'disconnecting'
|
||||
await self._trigger_event('disconnect', run_async=False)
|
||||
if self.current_transport == 'websocket':
|
||||
await self.ws.close()
|
||||
if not abort:
|
||||
await self.read_loop_task
|
||||
self.state = 'disconnected'
|
||||
try:
|
||||
client.connected_clients.remove(self)
|
||||
except ValueError: # pragma: no cover
|
||||
pass
|
||||
self._reset()
|
||||
|
||||
def start_background_task(self, target, *args, **kwargs):
|
||||
"""Start a background task.
|
||||
|
||||
This is a utility function that applications can use to start a
|
||||
background task.
|
||||
|
||||
:param target: the target function to execute.
|
||||
:param args: arguments to pass to the function.
|
||||
:param kwargs: keyword arguments to pass to the function.
|
||||
|
||||
This function returns an object compatible with the `Thread` class in
|
||||
the Python standard library. The `start()` method on this object is
|
||||
already called by this function.
|
||||
|
||||
Note: this method is a coroutine.
|
||||
"""
|
||||
return asyncio.ensure_future(target(*args, **kwargs))
|
||||
|
||||
async def sleep(self, seconds=0):
|
||||
"""Sleep for the requested amount of time.
|
||||
|
||||
Note: this method is a coroutine.
|
||||
"""
|
||||
return await asyncio.sleep(seconds)
|
||||
|
||||
def create_queue(self):
|
||||
"""Create a queue object."""
|
||||
q = asyncio.Queue()
|
||||
q.Empty = asyncio.QueueEmpty
|
||||
return q
|
||||
|
||||
def create_event(self):
|
||||
"""Create an event object."""
|
||||
return asyncio.Event()
|
||||
|
||||
def _reset(self):
|
||||
if self.http: # pragma: no cover
|
||||
asyncio.ensure_future(self.http.close())
|
||||
super()._reset()
|
||||
|
||||
async def _connect_polling(self, url, headers, engineio_path):
|
||||
"""Establish a long-polling connection to the Engine.IO server."""
|
||||
if aiohttp is None: # pragma: no cover
|
||||
self.logger.error('aiohttp not installed -- cannot make HTTP '
|
||||
'requests!')
|
||||
return
|
||||
self.base_url = self._get_engineio_url(url, engineio_path, 'polling')
|
||||
self.logger.info('Attempting polling connection to ' + self.base_url)
|
||||
r = await self._send_request(
|
||||
'GET', self.base_url + self._get_url_timestamp(), headers=headers,
|
||||
timeout=self.request_timeout)
|
||||
if r is None:
|
||||
self._reset()
|
||||
raise exceptions.ConnectionError(
|
||||
'Connection refused by the server')
|
||||
if r.status < 200 or r.status >= 300:
|
||||
self._reset()
|
||||
try:
|
||||
arg = await r.json()
|
||||
except aiohttp.ClientError:
|
||||
arg = None
|
||||
raise exceptions.ConnectionError(
|
||||
'Unexpected status code {} in server response'.format(
|
||||
r.status), arg)
|
||||
try:
|
||||
p = payload.Payload(encoded_payload=(await r.read()).decode(
|
||||
'utf-8'))
|
||||
except ValueError:
|
||||
raise exceptions.ConnectionError(
|
||||
'Unexpected response from server') from None
|
||||
open_packet = p.packets[0]
|
||||
if open_packet.packet_type != packet.OPEN:
|
||||
raise exceptions.ConnectionError(
|
||||
'OPEN packet not returned by server')
|
||||
self.logger.info(
|
||||
'Polling connection accepted with ' + str(open_packet.data))
|
||||
self.sid = open_packet.data['sid']
|
||||
self.upgrades = open_packet.data['upgrades']
|
||||
self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
|
||||
self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
|
||||
self.current_transport = 'polling'
|
||||
self.base_url += '&sid=' + self.sid
|
||||
|
||||
self.state = 'connected'
|
||||
client.connected_clients.append(self)
|
||||
await self._trigger_event('connect', run_async=False)
|
||||
|
||||
for pkt in p.packets[1:]:
|
||||
await self._receive_packet(pkt)
|
||||
|
||||
if 'websocket' in self.upgrades and 'websocket' in self.transports:
|
||||
# attempt to upgrade to websocket
|
||||
if await self._connect_websocket(url, headers, engineio_path):
|
||||
# upgrade to websocket succeeded, we're done here
|
||||
return
|
||||
|
||||
self.write_loop_task = self.start_background_task(self._write_loop)
|
||||
self.read_loop_task = self.start_background_task(
|
||||
self._read_loop_polling)
|
||||
|
||||
async def _connect_websocket(self, url, headers, engineio_path):
|
||||
"""Establish or upgrade to a WebSocket connection with the server."""
|
||||
if aiohttp is None: # pragma: no cover
|
||||
self.logger.error('aiohttp package not installed')
|
||||
return False
|
||||
websocket_url = self._get_engineio_url(url, engineio_path,
|
||||
'websocket')
|
||||
if self.sid:
|
||||
self.logger.info(
|
||||
'Attempting WebSocket upgrade to ' + websocket_url)
|
||||
upgrade = True
|
||||
websocket_url += '&sid=' + self.sid
|
||||
else:
|
||||
upgrade = False
|
||||
self.base_url = websocket_url
|
||||
self.logger.info(
|
||||
'Attempting WebSocket connection to ' + websocket_url)
|
||||
|
||||
if self.http is None or self.http.closed: # pragma: no cover
|
||||
self.http = aiohttp.ClientSession()
|
||||
|
||||
# extract any new cookies passed in a header so that they can also be
|
||||
# sent to the WebSocket route
|
||||
cookies = {}
|
||||
for header, value in headers.items():
|
||||
if header.lower() == 'cookie':
|
||||
cookies = dict(
|
||||
[cookie.split('=', 1) for cookie in value.split('; ')])
|
||||
del headers[header]
|
||||
break
|
||||
self.http.cookie_jar.update_cookies(cookies)
|
||||
|
||||
try:
|
||||
if not self.ssl_verify:
|
||||
ssl_context = ssl.create_default_context()
|
||||
ssl_context.check_hostname = False
|
||||
ssl_context.verify_mode = ssl.CERT_NONE
|
||||
ws = await self.http.ws_connect(
|
||||
websocket_url + self._get_url_timestamp(),
|
||||
headers=headers, ssl=ssl_context)
|
||||
else:
|
||||
ws = await self.http.ws_connect(
|
||||
websocket_url + self._get_url_timestamp(),
|
||||
headers=headers)
|
||||
except (aiohttp.client_exceptions.WSServerHandshakeError,
|
||||
aiohttp.client_exceptions.ServerConnectionError,
|
||||
aiohttp.client_exceptions.ClientConnectionError):
|
||||
if upgrade:
|
||||
self.logger.warning(
|
||||
'WebSocket upgrade failed: connection error')
|
||||
return False
|
||||
else:
|
||||
raise exceptions.ConnectionError('Connection error')
|
||||
if upgrade:
|
||||
p = packet.Packet(packet.PING, data='probe').encode()
|
||||
try:
|
||||
await ws.send_str(p)
|
||||
except Exception as e: # pragma: no cover
|
||||
self.logger.warning(
|
||||
'WebSocket upgrade failed: unexpected send exception: %s',
|
||||
str(e))
|
||||
return False
|
||||
try:
|
||||
p = (await ws.receive()).data
|
||||
except Exception as e: # pragma: no cover
|
||||
self.logger.warning(
|
||||
'WebSocket upgrade failed: unexpected recv exception: %s',
|
||||
str(e))
|
||||
return False
|
||||
pkt = packet.Packet(encoded_packet=p)
|
||||
if pkt.packet_type != packet.PONG or pkt.data != 'probe':
|
||||
self.logger.warning(
|
||||
'WebSocket upgrade failed: no PONG packet')
|
||||
return False
|
||||
p = packet.Packet(packet.UPGRADE).encode()
|
||||
try:
|
||||
await ws.send_str(p)
|
||||
except Exception as e: # pragma: no cover
|
||||
self.logger.warning(
|
||||
'WebSocket upgrade failed: unexpected send exception: %s',
|
||||
str(e))
|
||||
return False
|
||||
self.current_transport = 'websocket'
|
||||
self.logger.info('WebSocket upgrade was successful')
|
||||
else:
|
||||
try:
|
||||
p = (await ws.receive()).data
|
||||
except Exception as e: # pragma: no cover
|
||||
raise exceptions.ConnectionError(
|
||||
'Unexpected recv exception: ' + str(e))
|
||||
open_packet = packet.Packet(encoded_packet=p)
|
||||
if open_packet.packet_type != packet.OPEN:
|
||||
raise exceptions.ConnectionError('no OPEN packet')
|
||||
self.logger.info(
|
||||
'WebSocket connection accepted with ' + str(open_packet.data))
|
||||
self.sid = open_packet.data['sid']
|
||||
self.upgrades = open_packet.data['upgrades']
|
||||
self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
|
||||
self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
|
||||
self.current_transport = 'websocket'
|
||||
|
||||
self.state = 'connected'
|
||||
client.connected_clients.append(self)
|
||||
await self._trigger_event('connect', run_async=False)
|
||||
|
||||
self.ws = ws
|
||||
self.write_loop_task = self.start_background_task(self._write_loop)
|
||||
self.read_loop_task = self.start_background_task(
|
||||
self._read_loop_websocket)
|
||||
return True
|
||||
|
||||
async def _receive_packet(self, pkt):
|
||||
"""Handle incoming packets from the server."""
|
||||
packet_name = packet.packet_names[pkt.packet_type] \
|
||||
if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
|
||||
self.logger.info(
|
||||
'Received packet %s data %s', packet_name,
|
||||
pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
|
||||
if pkt.packet_type == packet.MESSAGE:
|
||||
await self._trigger_event('message', pkt.data, run_async=True)
|
||||
elif pkt.packet_type == packet.PING:
|
||||
await self._send_packet(packet.Packet(packet.PONG, pkt.data))
|
||||
elif pkt.packet_type == packet.CLOSE:
|
||||
await self.disconnect(abort=True)
|
||||
elif pkt.packet_type == packet.NOOP:
|
||||
pass
|
||||
else:
|
||||
self.logger.error('Received unexpected packet of type %s',
|
||||
pkt.packet_type)
|
||||
|
||||
async def _send_packet(self, pkt):
|
||||
"""Queue a packet to be sent to the server."""
|
||||
if self.state != 'connected':
|
||||
return
|
||||
await self.queue.put(pkt)
|
||||
self.logger.info(
|
||||
'Sending packet %s data %s',
|
||||
packet.packet_names[pkt.packet_type],
|
||||
pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
|
||||
|
||||
async def _send_request(
|
||||
self, method, url, headers=None, body=None,
|
||||
timeout=None): # pragma: no cover
|
||||
if self.http is None or self.http.closed:
|
||||
self.http = aiohttp.ClientSession()
|
||||
http_method = getattr(self.http, method.lower())
|
||||
|
||||
try:
|
||||
if not self.ssl_verify:
|
||||
return await http_method(
|
||||
url, headers=headers, data=body,
|
||||
timeout=aiohttp.ClientTimeout(total=timeout), ssl=False)
|
||||
else:
|
||||
return await http_method(
|
||||
url, headers=headers, data=body,
|
||||
timeout=aiohttp.ClientTimeout(total=timeout))
|
||||
|
||||
except (aiohttp.ClientError, asyncio.TimeoutError) as exc:
|
||||
self.logger.info('HTTP %s request to %s failed with error %s.',
|
||||
method, url, exc)
|
||||
|
||||
async def _trigger_event(self, event, *args, **kwargs):
|
||||
"""Invoke an event handler."""
|
||||
run_async = kwargs.pop('run_async', False)
|
||||
ret = None
|
||||
if event in self.handlers:
|
||||
if asyncio.iscoroutinefunction(self.handlers[event]) is True:
|
||||
if run_async:
|
||||
return self.start_background_task(self.handlers[event],
|
||||
*args)
|
||||
else:
|
||||
try:
|
||||
ret = await self.handlers[event](*args)
|
||||
except asyncio.CancelledError: # pragma: no cover
|
||||
pass
|
||||
except:
|
||||
self.logger.exception(event + ' async handler error')
|
||||
if event == 'connect':
|
||||
# if connect handler raised error we reject the
|
||||
# connection
|
||||
return False
|
||||
else:
|
||||
if run_async:
|
||||
async def async_handler():
|
||||
return self.handlers[event](*args)
|
||||
|
||||
return self.start_background_task(async_handler)
|
||||
else:
|
||||
try:
|
||||
ret = self.handlers[event](*args)
|
||||
except:
|
||||
self.logger.exception(event + ' handler error')
|
||||
if event == 'connect':
|
||||
# if connect handler raised error we reject the
|
||||
# connection
|
||||
return False
|
||||
return ret
|
||||
|
||||
async def _read_loop_polling(self):
|
||||
"""Read packets by polling the Engine.IO server."""
|
||||
while self.state == 'connected':
|
||||
self.logger.info(
|
||||
'Sending polling GET request to ' + self.base_url)
|
||||
r = await self._send_request(
|
||||
'GET', self.base_url + self._get_url_timestamp(),
|
||||
timeout=max(self.ping_interval, self.ping_timeout) + 5)
|
||||
if r is None:
|
||||
self.logger.warning(
|
||||
'Connection refused by the server, aborting')
|
||||
await self.queue.put(None)
|
||||
break
|
||||
if r.status < 200 or r.status >= 300:
|
||||
self.logger.warning('Unexpected status code %s in server '
|
||||
'response, aborting', r.status)
|
||||
await self.queue.put(None)
|
||||
break
|
||||
try:
|
||||
p = payload.Payload(encoded_payload=(await r.read()).decode(
|
||||
'utf-8'))
|
||||
except ValueError:
|
||||
self.logger.warning(
|
||||
'Unexpected packet from server, aborting')
|
||||
await self.queue.put(None)
|
||||
break
|
||||
for pkt in p.packets:
|
||||
await self._receive_packet(pkt)
|
||||
|
||||
self.logger.info('Waiting for write loop task to end')
|
||||
await self.write_loop_task
|
||||
if self.state == 'connected':
|
||||
await self._trigger_event('disconnect', run_async=False)
|
||||
try:
|
||||
client.connected_clients.remove(self)
|
||||
except ValueError: # pragma: no cover
|
||||
pass
|
||||
self._reset()
|
||||
self.logger.info('Exiting read loop task')
|
||||
|
||||
async def _read_loop_websocket(self):
|
||||
"""Read packets from the Engine.IO WebSocket connection."""
|
||||
while self.state == 'connected':
|
||||
p = None
|
||||
try:
|
||||
p = await asyncio.wait_for(
|
||||
self.ws.receive(),
|
||||
timeout=self.ping_interval + self.ping_timeout)
|
||||
p = p.data
|
||||
if p is None: # pragma: no cover
|
||||
break # the connection is broken
|
||||
except asyncio.TimeoutError:
|
||||
self.logger.warning(
|
||||
'Server has stopped communicating, aborting')
|
||||
await self.queue.put(None)
|
||||
break
|
||||
except aiohttp.client_exceptions.ServerDisconnectedError:
|
||||
self.logger.info(
|
||||
'Read loop: WebSocket connection was closed, aborting')
|
||||
await self.queue.put(None)
|
||||
break
|
||||
except Exception as e:
|
||||
self.logger.info(
|
||||
'Unexpected error receiving packet: "%s", aborting',
|
||||
str(e))
|
||||
await self.queue.put(None)
|
||||
break
|
||||
try:
|
||||
pkt = packet.Packet(encoded_packet=p)
|
||||
except Exception as e: # pragma: no cover
|
||||
self.logger.info(
|
||||
'Unexpected error decoding packet: "%s", aborting', str(e))
|
||||
await self.queue.put(None)
|
||||
break
|
||||
await self._receive_packet(pkt)
|
||||
|
||||
self.logger.info('Waiting for write loop task to end')
|
||||
await self.write_loop_task
|
||||
if self.state == 'connected':
|
||||
await self._trigger_event('disconnect', run_async=False)
|
||||
try:
|
||||
client.connected_clients.remove(self)
|
||||
except ValueError: # pragma: no cover
|
||||
pass
|
||||
self._reset()
|
||||
self.logger.info('Exiting read loop task')
|
||||
|
||||
async def _write_loop(self):
|
||||
"""This background task sends packages to the server as they are
|
||||
pushed to the send queue.
|
||||
"""
|
||||
while self.state == 'connected':
|
||||
# to simplify the timeout handling, use the maximum of the
|
||||
# ping interval and ping timeout as timeout, with an extra 5
|
||||
# seconds grace period
|
||||
timeout = max(self.ping_interval, self.ping_timeout) + 5
|
||||
packets = None
|
||||
try:
|
||||
packets = [await asyncio.wait_for(self.queue.get(), timeout)]
|
||||
except (self.queue.Empty, asyncio.TimeoutError,
|
||||
asyncio.CancelledError):
|
||||
self.logger.error('packet queue is empty, aborting')
|
||||
break
|
||||
if packets == [None]:
|
||||
self.queue.task_done()
|
||||
packets = []
|
||||
else:
|
||||
while True:
|
||||
try:
|
||||
packets.append(self.queue.get_nowait())
|
||||
except self.queue.Empty:
|
||||
break
|
||||
if packets[-1] is None:
|
||||
packets = packets[:-1]
|
||||
self.queue.task_done()
|
||||
break
|
||||
if not packets:
|
||||
# empty packet list returned -> connection closed
|
||||
break
|
||||
if self.current_transport == 'polling':
|
||||
p = payload.Payload(packets=packets)
|
||||
r = await self._send_request(
|
||||
'POST', self.base_url, body=p.encode(),
|
||||
headers={'Content-Type': 'application/octet-stream'},
|
||||
timeout=self.request_timeout)
|
||||
for pkt in packets:
|
||||
self.queue.task_done()
|
||||
if r is None:
|
||||
self.logger.warning(
|
||||
'Connection refused by the server, aborting')
|
||||
break
|
||||
if r.status < 200 or r.status >= 300:
|
||||
self.logger.warning('Unexpected status code %s in server '
|
||||
'response, aborting', r.status)
|
||||
self._reset()
|
||||
break
|
||||
else:
|
||||
# websocket
|
||||
try:
|
||||
for pkt in packets:
|
||||
if pkt.binary:
|
||||
await self.ws.send_bytes(pkt.encode())
|
||||
else:
|
||||
await self.ws.send_str(pkt.encode())
|
||||
self.queue.task_done()
|
||||
except (aiohttp.client_exceptions.ServerDisconnectedError,
|
||||
BrokenPipeError, OSError):
|
||||
self.logger.info(
|
||||
'Write loop: WebSocket connection was closed, '
|
||||
'aborting')
|
||||
break
|
||||
self.logger.info('Exiting write loop task')
|
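To tie the pieces of asyncio_client.py together, a usage sketch in the spirit of the connect() docstring, with a message handler added; the URL is illustrative and on() is inherited from the base Client class.

    import asyncio
    import engineio

    eio = engineio.AsyncClient()

    @eio.on('message')
    async def on_message(data):
        print('received:', data)

    async def main():
        await eio.connect('http://localhost:5000')
        await eio.send('hello')
        await eio.wait()   # block until the connection ends

    asyncio.run(main())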
505
matteo_env/Lib/site-packages/engineio/asyncio_server.py
Normal file
|
@ -0,0 +1,505 @@
|
|||
import asyncio
|
||||
import urllib
|
||||
|
||||
from . import exceptions
|
||||
from . import packet
|
||||
from . import server
|
||||
from . import asyncio_socket
|
||||
|
||||
|
||||
class AsyncServer(server.Server):
|
||||
"""An Engine.IO server for asyncio.
|
||||
|
||||
This class implements a fully compliant Engine.IO web server with support
|
||||
for websocket and long-polling transports, compatible with the asyncio
|
||||
framework on Python 3.5 or newer.
|
||||
|
||||
:param async_mode: The asynchronous model to use. See the Deployment
|
||||
section in the documentation for a description of the
|
||||
available options. Valid async modes are "aiohttp",
|
||||
"sanic", "tornado" and "asgi". If this argument is not
|
||||
given, "aiohttp" is tried first, followed by "sanic",
|
||||
"tornado", and finally "asgi". The first async mode that
|
||||
has all its dependencies installed is the one that is
|
||||
chosen.
|
||||
:param ping_interval: The interval in seconds at which the server pings
|
||||
the client. The default is 25 seconds. For advanced
|
||||
control, a two element tuple can be given, where
|
||||
the first number is the ping interval and the second
|
||||
is a grace period added by the server.
|
||||
:param ping_timeout: The time in seconds that the client waits for the
|
||||
server to respond before disconnecting. The default
|
||||
is 5 seconds.
|
||||
:param max_http_buffer_size: The maximum size of a message when using the
|
||||
polling transport. The default is 1,000,000
|
||||
bytes.
|
||||
:param allow_upgrades: Whether to allow transport upgrades or not.
|
||||
:param http_compression: Whether to compress packages when using the
|
||||
polling transport.
|
||||
:param compression_threshold: Only compress messages when their byte size
|
||||
is greater than this value.
|
||||
:param cookie: If set to a string, it is the name of the HTTP cookie the
|
||||
server sends back to the client containing the client
|
||||
session id. If set to a dictionary, the ``'name'`` key
|
||||
contains the cookie name and other keys define cookie
|
||||
attributes, where the value of each attribute can be a
|
||||
string, a callable with no arguments, or a boolean. If set
|
||||
to ``None`` (the default), a cookie is not sent to the
|
||||
client.
|
||||
:param cors_allowed_origins: Origin or list of origins that are allowed to
|
||||
connect to this server. Only the same origin
|
||||
is allowed by default. Set this argument to
|
||||
``'*'`` to allow all origins, or to ``[]`` to
|
||||
disable CORS handling.
|
||||
:param cors_credentials: Whether credentials (cookies, authentication) are
|
||||
allowed in requests to this server.
|
||||
:param logger: To enable logging set to ``True`` or pass a logger object to
|
||||
use. To disable logging set to ``False``. Note that fatal
|
||||
errors are logged even when ``logger`` is ``False``.
|
||||
:param json: An alternative json module to use for encoding and decoding
|
||||
packets. Custom json modules must have ``dumps`` and ``loads``
|
||||
functions that are compatible with the standard library
|
||||
versions.
|
||||
:param async_handlers: If set to ``True``, run message event handlers in
|
||||
non-blocking threads. To run handlers synchronously,
|
||||
set to ``False``. The default is ``True``.
|
||||
:param kwargs: Reserved for future extensions, any additional parameters
|
||||
given as keyword arguments will be silently ignored.
|
||||
"""
|
||||
def is_asyncio_based(self):
|
||||
return True
|
||||
|
||||
def async_modes(self):
|
||||
return ['aiohttp', 'sanic', 'tornado', 'asgi']
|
||||
|
||||
def attach(self, app, engineio_path='engine.io'):
|
||||
"""Attach the Engine.IO server to an application."""
|
||||
engineio_path = engineio_path.strip('/')
|
||||
self._async['create_route'](app, self, '/{}/'.format(engineio_path))
|
||||
|
||||
async def send(self, sid, data):
|
||||
"""Send a message to a client.
|
||||
|
||||
:param sid: The session id of the recipient client.
|
||||
:param data: The data to send to the client. Data can be of type
|
||||
``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
|
||||
or ``dict``, the data will be serialized as JSON.
|
||||
|
||||
Note: this method is a coroutine.
|
||||
"""
|
||||
try:
|
||||
socket = self._get_socket(sid)
|
||||
except KeyError:
|
||||
# the socket is not available
|
||||
self.logger.warning('Cannot send to sid %s', sid)
|
||||
return
|
||||
await socket.send(packet.Packet(packet.MESSAGE, data=data))
|
||||
|
||||
async def get_session(self, sid):
|
||||
"""Return the user session for a client.
|
||||
|
||||
:param sid: The session id of the client.
|
||||
|
||||
The return value is a dictionary. Modifications made to this
|
||||
dictionary are not guaranteed to be preserved. If you want to modify
|
||||
the user session, use the ``session`` context manager instead.
|
||||
"""
|
||||
socket = self._get_socket(sid)
|
||||
return socket.session
|
||||
|
||||
async def save_session(self, sid, session):
|
||||
"""Store the user session for a client.
|
||||
|
||||
:param sid: The session id of the client.
|
||||
:param session: The session dictionary.
|
||||
"""
|
||||
socket = self._get_socket(sid)
|
||||
socket.session = session
|
||||
|
||||
def session(self, sid):
|
||||
"""Return the user session for a client with context manager syntax.
|
||||
|
||||
:param sid: The session id of the client.
|
||||
|
||||
This is a context manager that returns the user session dictionary for
|
||||
the client. Any changes that are made to this dictionary inside the
|
||||
context manager block are saved back to the session. Example usage::
|
||||
|
||||
@eio.on('connect')
|
||||
def on_connect(sid, environ):
|
||||
username = authenticate_user(environ)
|
||||
if not username:
|
||||
return False
|
||||
with eio.session(sid) as session:
|
||||
session['username'] = username
|
||||
|
||||
@eio.on('message')
|
||||
def on_message(sid, msg):
|
||||
async with eio.session(sid) as session:
|
||||
print('received message from ', session['username'])
|
||||
"""
|
||||
class _session_context_manager(object):
|
||||
def __init__(self, server, sid):
|
||||
self.server = server
|
||||
self.sid = sid
|
||||
self.session = None
|
||||
|
||||
async def __aenter__(self):
|
||||
self.session = await self.server.get_session(sid)
|
||||
return self.session
|
||||
|
||||
async def __aexit__(self, *args):
|
||||
await self.server.save_session(sid, self.session)
|
||||
|
||||
return _session_context_manager(self, sid)
|
||||
|
||||
async def disconnect(self, sid=None):
|
||||
"""Disconnect a client.
|
||||
|
||||
:param sid: The session id of the client to close. If this parameter
|
||||
is not given, then all clients are closed.
|
||||
|
||||
Note: this method is a coroutine.
|
||||
"""
|
||||
if sid is not None:
|
||||
try:
|
||||
socket = self._get_socket(sid)
|
||||
except KeyError: # pragma: no cover
|
||||
# the socket was already closed or gone
|
||||
pass
|
||||
else:
|
||||
await socket.close()
|
||||
if sid in self.sockets: # pragma: no cover
|
||||
del self.sockets[sid]
|
||||
else:
|
||||
await asyncio.wait([client.close()
|
||||
for client in self.sockets.values()])
|
||||
self.sockets = {}
|
||||
|
||||
async def handle_request(self, *args, **kwargs):
|
||||
"""Handle an HTTP request from the client.
|
||||
|
||||
This is the entry point of the Engine.IO application. This function
|
||||
returns the HTTP response to deliver to the client.
|
||||
|
||||
Note: this method is a coroutine.
|
||||
"""
|
||||
translate_request = self._async['translate_request']
|
||||
if asyncio.iscoroutinefunction(translate_request):
|
||||
environ = await translate_request(*args, **kwargs)
|
||||
else:
|
||||
environ = translate_request(*args, **kwargs)
|
||||
|
||||
if self.cors_allowed_origins != []:
|
||||
# Validate the origin header if present
|
||||
# This is important for WebSocket more than for HTTP, since
|
||||
# browsers only apply CORS controls to HTTP.
|
||||
origin = environ.get('HTTP_ORIGIN')
|
||||
if origin:
|
||||
allowed_origins = self._cors_allowed_origins(environ)
|
||||
if allowed_origins is not None and origin not in \
|
||||
allowed_origins:
|
||||
self._log_error_once(
|
||||
origin + ' is not an accepted origin.', 'bad-origin')
|
||||
return await self._make_response(
|
||||
self._bad_request(
|
||||
origin + ' is not an accepted origin.'),
|
||||
environ)
|
||||
|
||||
method = environ['REQUEST_METHOD']
|
||||
query = urllib.parse.parse_qs(environ.get('QUERY_STRING', ''))
|
||||
|
||||
sid = query['sid'][0] if 'sid' in query else None
|
||||
jsonp = False
|
||||
jsonp_index = None
|
||||
|
||||
# make sure the client speaks a compatible Engine.IO version
|
||||
sid = query['sid'][0] if 'sid' in query else None
|
||||
if sid is None and query.get('EIO') != ['4']:
|
||||
self._log_error_once(
|
||||
'The client is using an unsupported version of the Socket.IO '
|
||||
'or Engine.IO protocols', 'bad-version'
|
||||
)
|
||||
return await self._make_response(self._bad_request(
|
||||
'The client is using an unsupported version of the Socket.IO '
|
||||
'or Engine.IO protocols'
|
||||
), environ)
|
||||
|
||||
if 'j' in query:
|
||||
jsonp = True
|
||||
try:
|
||||
jsonp_index = int(query['j'][0])
|
||||
except (ValueError, KeyError, IndexError):
|
||||
# Invalid JSONP index number
|
||||
pass
|
||||
|
||||
if jsonp and jsonp_index is None:
|
||||
self._log_error_once('Invalid JSONP index number',
|
||||
'bad-jsonp-index')
|
||||
r = self._bad_request('Invalid JSONP index number')
|
||||
elif method == 'GET':
|
||||
if sid is None:
|
||||
transport = query.get('transport', ['polling'])[0]
|
||||
# transport must be one of 'polling' or 'websocket'.
|
||||
# if 'websocket', the HTTP_UPGRADE header must match.
|
||||
upgrade_header = environ.get('HTTP_UPGRADE').lower() \
|
||||
if 'HTTP_UPGRADE' in environ else None
|
||||
if transport == 'polling' \
|
||||
or transport == upgrade_header == 'websocket':
|
||||
r = await self._handle_connect(environ, transport,
|
||||
jsonp_index)
|
||||
else:
|
||||
self._log_error_once('Invalid transport ' + transport,
|
||||
'bad-transport')
|
||||
r = self._bad_request('Invalid transport ' + transport)
|
||||
else:
|
||||
if sid not in self.sockets:
|
||||
self._log_error_once('Invalid session ' + sid, 'bad-sid')
|
||||
r = self._bad_request('Invalid session ' + sid)
|
||||
else:
|
||||
socket = self._get_socket(sid)
|
||||
try:
|
||||
packets = await socket.handle_get_request(environ)
|
||||
if isinstance(packets, list):
|
||||
r = self._ok(packets, jsonp_index=jsonp_index)
|
||||
else:
|
||||
r = packets
|
||||
except exceptions.EngineIOError:
|
||||
if sid in self.sockets: # pragma: no cover
|
||||
await self.disconnect(sid)
|
||||
r = self._bad_request()
|
||||
if sid in self.sockets and self.sockets[sid].closed:
|
||||
del self.sockets[sid]
|
||||
elif method == 'POST':
|
||||
if sid is None or sid not in self.sockets:
|
||||
self._log_error_once('Invalid session ' + sid, 'bad-sid')
|
||||
r = self._bad_request('Invalid session ' + sid)
|
||||
else:
|
||||
socket = self._get_socket(sid)
|
||||
try:
|
||||
await socket.handle_post_request(environ)
|
||||
r = self._ok(jsonp_index=jsonp_index)
|
||||
except exceptions.EngineIOError:
|
||||
if sid in self.sockets: # pragma: no cover
|
||||
await self.disconnect(sid)
|
||||
r = self._bad_request()
|
||||
except: # pragma: no cover
|
||||
# for any other unexpected errors, we log the error
|
||||
# and keep going
|
||||
self.logger.exception('post request handler error')
|
||||
r = self._ok(jsonp_index=jsonp_index)
|
||||
elif method == 'OPTIONS':
|
||||
r = self._ok()
|
||||
else:
|
||||
self.logger.warning('Method %s not supported', method)
|
||||
r = self._method_not_found()
|
||||
if not isinstance(r, dict):
|
||||
return r
|
||||
if self.http_compression and \
|
||||
len(r['response']) >= self.compression_threshold:
|
||||
encodings = [e.split(';')[0].strip() for e in
|
||||
environ.get('HTTP_ACCEPT_ENCODING', '').split(',')]
|
||||
for encoding in encodings:
|
||||
if encoding in self.compression_methods:
|
||||
r['response'] = \
|
||||
getattr(self, '_' + encoding)(r['response'])
|
||||
r['headers'] += [('Content-Encoding', encoding)]
|
||||
break
|
||||
return await self._make_response(r, environ)
|
||||
|
||||
def start_background_task(self, target, *args, **kwargs):
|
||||
"""Start a background task using the appropriate async model.
|
||||
|
||||
This is a utility function that applications can use to start a
|
||||
background task using the method that is compatible with the
|
||||
selected async mode.
|
||||
|
||||
:param target: the target function to execute.
|
||||
:param args: arguments to pass to the function.
|
||||
:param kwargs: keyword arguments to pass to the function.
|
||||
|
||||
The return value is a ``asyncio.Task`` object.
|
||||
"""
|
||||
return asyncio.ensure_future(target(*args, **kwargs))
|
||||
|
||||
async def sleep(self, seconds=0):
|
||||
"""Sleep for the requested amount of time using the appropriate async
|
||||
model.
|
||||
|
||||
This is a utility function that applications can use to put a task to
|
||||
sleep without having to worry about using the correct call for the
|
||||
selected async mode.
|
||||
|
||||
Note: this method is a coroutine.
|
||||
"""
|
||||
return await asyncio.sleep(seconds)
|
||||
|
||||
def create_queue(self, *args, **kwargs):
|
||||
"""Create a queue object using the appropriate async model.
|
||||
|
||||
This is a utility function that applications can use to create a queue
|
||||
without having to worry about using the correct call for the selected
|
||||
async mode. For asyncio based async modes, this returns an instance of
|
||||
``asyncio.Queue``.
|
||||
"""
|
||||
return asyncio.Queue(*args, **kwargs)
|
||||
|
||||
def get_queue_empty_exception(self):
|
||||
"""Return the queue empty exception for the appropriate async model.
|
||||
|
||||
This is a utility function that applications can use to work with a
|
||||
queue without having to worry about using the correct call for the
|
||||
selected async mode. For asyncio based async modes, this returns an
|
||||
instance of ``asyncio.QueueEmpty``.
|
||||
"""
|
||||
return asyncio.QueueEmpty
|
||||
|
||||
def create_event(self, *args, **kwargs):
|
||||
"""Create an event object using the appropriate async model.
|
||||
|
||||
This is a utility function that applications can use to create an
|
||||
event without having to worry about using the correct call for the
|
||||
selected async mode. For asyncio based async modes, this returns
|
||||
an instance of ``asyncio.Event``.
|
||||
"""
|
||||
return asyncio.Event(*args, **kwargs)
|
||||
|
||||
async def _make_response(self, response_dict, environ):
|
||||
cors_headers = self._cors_headers(environ)
|
||||
make_response = self._async['make_response']
|
||||
if asyncio.iscoroutinefunction(make_response):
|
||||
response = await make_response(
|
||||
response_dict['status'],
|
||||
response_dict['headers'] + cors_headers,
|
||||
response_dict['response'], environ)
|
||||
else:
|
||||
response = make_response(
|
||||
response_dict['status'],
|
||||
response_dict['headers'] + cors_headers,
|
||||
response_dict['response'], environ)
|
||||
return response
|
||||
|
||||
async def _handle_connect(self, environ, transport, jsonp_index=None):
|
||||
"""Handle a client connection request."""
|
||||
if self.start_service_task:
|
||||
# start the service task to monitor connected clients
|
||||
self.start_service_task = False
|
||||
self.start_background_task(self._service_task)
|
||||
|
||||
sid = self.generate_id()
|
||||
s = asyncio_socket.AsyncSocket(self, sid)
|
||||
self.sockets[sid] = s
|
||||
|
||||
pkt = packet.Packet(
|
||||
packet.OPEN, {'sid': sid,
|
||||
'upgrades': self._upgrades(sid, transport),
|
||||
'pingTimeout': int(self.ping_timeout * 1000),
|
||||
'pingInterval': int(self.ping_interval * 1000)})
|
||||
await s.send(pkt)
|
||||
s.schedule_ping()
|
||||
|
||||
ret = await self._trigger_event('connect', sid, environ,
|
||||
run_async=False)
|
||||
if ret is not None and ret is not True:
|
||||
del self.sockets[sid]
|
||||
self.logger.warning('Application rejected connection')
|
||||
return self._unauthorized(ret or None)
|
||||
|
||||
if transport == 'websocket':
|
||||
ret = await s.handle_get_request(environ)
|
||||
if s.closed and sid in self.sockets:
|
||||
# websocket connection ended, so we are done
|
||||
del self.sockets[sid]
|
||||
return ret
|
||||
else:
|
||||
s.connected = True
|
||||
headers = None
|
||||
if self.cookie:
|
||||
if isinstance(self.cookie, dict):
|
||||
headers = [(
|
||||
'Set-Cookie',
|
||||
self._generate_sid_cookie(sid, self.cookie)
|
||||
)]
|
||||
else:
|
||||
headers = [(
|
||||
'Set-Cookie',
|
||||
self._generate_sid_cookie(sid, {
|
||||
'name': self.cookie, 'path': '/', 'SameSite': 'Lax'
|
||||
})
|
||||
)]
|
||||
try:
|
||||
return self._ok(await s.poll(), headers=headers,
|
||||
jsonp_index=jsonp_index)
|
||||
except exceptions.QueueEmpty:
|
||||
return self._bad_request()
|
||||
|
||||
async def _trigger_event(self, event, *args, **kwargs):
|
||||
"""Invoke an event handler."""
|
||||
run_async = kwargs.pop('run_async', False)
|
||||
ret = None
|
||||
if event in self.handlers:
|
||||
if asyncio.iscoroutinefunction(self.handlers[event]) is True:
|
||||
if run_async:
|
||||
return self.start_background_task(self.handlers[event],
|
||||
*args)
|
||||
else:
|
||||
try:
|
||||
ret = await self.handlers[event](*args)
|
||||
except asyncio.CancelledError: # pragma: no cover
|
||||
pass
|
||||
except:
|
||||
self.logger.exception(event + ' async handler error')
|
||||
if event == 'connect':
|
||||
# if connect handler raised error we reject the
|
||||
# connection
|
||||
return False
|
||||
else:
|
||||
if run_async:
|
||||
async def async_handler():
|
||||
return self.handlers[event](*args)
|
||||
|
||||
return self.start_background_task(async_handler)
|
||||
else:
|
||||
try:
|
||||
ret = self.handlers[event](*args)
|
||||
except:
|
||||
self.logger.exception(event + ' handler error')
|
||||
if event == 'connect':
|
||||
# if connect handler raised error we reject the
|
||||
# connection
|
||||
return False
|
||||
return ret
|
||||
|
||||
async def _service_task(self): # pragma: no cover
|
||||
"""Monitor connected clients and clean up those that time out."""
|
||||
while True:
|
||||
if len(self.sockets) == 0:
|
||||
# nothing to do
|
||||
await self.sleep(self.ping_timeout)
|
||||
continue
|
||||
|
||||
# go through the entire client list in a ping interval cycle
|
||||
sleep_interval = self.ping_timeout / len(self.sockets)
|
||||
|
||||
try:
|
||||
# iterate over the current clients
|
||||
for socket in self.sockets.copy().values():
|
||||
if not socket.closing and not socket.closed:
|
||||
await socket.check_ping_timeout()
|
||||
await self.sleep(sleep_interval)
|
||||
except (
|
||||
SystemExit,
|
||||
KeyboardInterrupt,
|
||||
asyncio.CancelledError,
|
||||
GeneratorExit,
|
||||
):
|
||||
self.logger.info('service task canceled')
|
||||
break
|
||||
except:
|
||||
if asyncio.get_event_loop().is_closed():
|
||||
self.logger.info('event loop is closed, exiting service '
|
||||
'task')
|
||||
break
|
||||
|
||||
# an unexpected exception has occurred, log it and continue
|
||||
self.logger.exception('service task exception')
|
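And a matching sketch for asyncio_server.py, using aiohttp (the first async_mode tried when none is given). eio.attach(), eio.on() and eio.send() are the methods defined above; the handler body and port are illustrative.

    from aiohttp import web
    import engineio

    eio = engineio.AsyncServer(async_mode='aiohttp')
    app = web.Application()
    eio.attach(app)

    @eio.on('message')
    async def on_message(sid, data):
        await eio.send(sid, 'echo: ' + str(data))

    web.run_app(app, port=5000)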
245
matteo_env/Lib/site-packages/engineio/asyncio_socket.py
Normal file
|
@ -0,0 +1,245 @@
|
|||
import asyncio
|
||||
import sys
|
||||
import time
|
||||
|
||||
from . import exceptions
|
||||
from . import packet
|
||||
from . import payload
|
||||
from . import socket
|
||||
|
||||
|
||||
class AsyncSocket(socket.Socket):
|
||||
async def poll(self):
|
||||
"""Wait for packets to send to the client."""
|
||||
try:
|
||||
packets = [await asyncio.wait_for(
|
||||
self.queue.get(),
|
||||
self.server.ping_interval + self.server.ping_timeout)]
|
||||
self.queue.task_done()
|
||||
except (asyncio.TimeoutError, asyncio.CancelledError):
|
||||
raise exceptions.QueueEmpty()
|
||||
if packets == [None]:
|
||||
return []
|
||||
while True:
|
||||
try:
|
||||
pkt = self.queue.get_nowait()
|
||||
self.queue.task_done()
|
||||
if pkt is None:
|
||||
self.queue.put_nowait(None)
|
||||
break
|
||||
packets.append(pkt)
|
||||
except asyncio.QueueEmpty:
|
||||
break
|
||||
return packets
|
||||
|
||||
async def receive(self, pkt):
|
||||
"""Receive packet from the client."""
|
||||
self.server.logger.info('%s: Received packet %s data %s',
|
||||
self.sid, packet.packet_names[pkt.packet_type],
|
||||
pkt.data if not isinstance(pkt.data, bytes)
|
||||
else '<binary>')
|
||||
if pkt.packet_type == packet.PONG:
|
||||
self.schedule_ping()
|
||||
elif pkt.packet_type == packet.MESSAGE:
|
||||
await self.server._trigger_event(
|
||||
'message', self.sid, pkt.data,
|
||||
run_async=self.server.async_handlers)
|
||||
elif pkt.packet_type == packet.UPGRADE:
|
||||
await self.send(packet.Packet(packet.NOOP))
|
||||
elif pkt.packet_type == packet.CLOSE:
|
||||
await self.close(wait=False, abort=True)
|
||||
else:
|
||||
raise exceptions.UnknownPacketError()
|
||||
|
||||
async def check_ping_timeout(self):
|
||||
"""Make sure the client is still sending pings."""
|
||||
if self.closed:
|
||||
raise exceptions.SocketIsClosedError()
|
||||
if self.last_ping and \
|
||||
time.time() - self.last_ping > self.server.ping_timeout:
|
||||
self.server.logger.info('%s: Client is gone, closing socket',
|
||||
self.sid)
|
||||
# Passing abort=False here will cause close() to write a
|
||||
# CLOSE packet. This has the effect of updating half-open sockets
|
||||
# to their correct state of disconnected
|
||||
await self.close(wait=False, abort=False)
|
||||
return False
|
||||
return True
|
||||
|
||||
async def send(self, pkt):
|
||||
"""Send a packet to the client."""
|
||||
if not await self.check_ping_timeout():
|
||||
return
|
||||
else:
|
||||
await self.queue.put(pkt)
|
||||
self.server.logger.info('%s: Sending packet %s data %s',
|
||||
self.sid, packet.packet_names[pkt.packet_type],
|
||||
pkt.data if not isinstance(pkt.data, bytes)
|
||||
else '<binary>')
|
||||
|
||||
async def handle_get_request(self, environ):
|
||||
"""Handle a long-polling GET request from the client."""
|
||||
connections = [
|
||||
s.strip()
|
||||
for s in environ.get('HTTP_CONNECTION', '').lower().split(',')]
|
||||
transport = environ.get('HTTP_UPGRADE', '').lower()
|
||||
if 'upgrade' in connections and transport in self.upgrade_protocols:
|
||||
self.server.logger.info('%s: Received request to upgrade to %s',
|
||||
self.sid, transport)
|
||||
return await getattr(self, '_upgrade_' + transport)(environ)
|
||||
if self.upgrading or self.upgraded:
|
||||
# we are upgrading to WebSocket, do not return any more packets
|
||||
# through the polling endpoint
|
||||
return [packet.Packet(packet.NOOP)]
|
||||
try:
|
||||
packets = await self.poll()
|
||||
except exceptions.QueueEmpty:
|
||||
exc = sys.exc_info()
|
||||
await self.close(wait=False)
|
||||
raise exc[1].with_traceback(exc[2])
|
||||
return packets
|
||||
|
||||
async def handle_post_request(self, environ):
|
||||
"""Handle a long-polling POST request from the client."""
|
||||
length = int(environ.get('CONTENT_LENGTH', '0'))
|
||||
if length > self.server.max_http_buffer_size:
|
||||
raise exceptions.ContentTooLongError()
|
||||
else:
|
||||
body = (await environ['wsgi.input'].read(length)).decode('utf-8')
|
||||
p = payload.Payload(encoded_payload=body)
|
||||
for pkt in p.packets:
|
||||
await self.receive(pkt)
|
||||
|
||||
async def close(self, wait=True, abort=False):
|
||||
"""Close the socket connection."""
|
||||
if not self.closed and not self.closing:
|
||||
self.closing = True
|
||||
await self.server._trigger_event('disconnect', self.sid)
|
||||
if not abort:
|
||||
await self.send(packet.Packet(packet.CLOSE))
|
||||
self.closed = True
|
||||
if wait:
|
||||
await self.queue.join()
|
||||
|
||||
def schedule_ping(self):
|
||||
async def send_ping():
|
||||
self.last_ping = None
|
||||
await asyncio.sleep(self.server.ping_interval)
|
||||
if not self.closing and not self.closed:
|
||||
self.last_ping = time.time()
|
||||
await self.send(packet.Packet(packet.PING))
|
||||
|
||||
self.server.start_background_task(send_ping)
|
||||
|
||||
async def _upgrade_websocket(self, environ):
|
||||
"""Upgrade the connection from polling to websocket."""
|
||||
if self.upgraded:
|
||||
raise IOError('Socket has been upgraded already')
|
||||
if self.server._async['websocket'] is None:
|
||||
# the selected async mode does not support websocket
|
||||
return self.server._bad_request()
|
||||
ws = self.server._async['websocket'](self._websocket_handler)
|
||||
return await ws(environ)
|
||||
|
||||
async def _websocket_handler(self, ws):
|
||||
"""Engine.IO handler for websocket transport."""
|
||||
if self.connected:
|
||||
# the socket was already connected, so this is an upgrade
|
||||
self.upgrading = True # hold packet sends during the upgrade
|
||||
|
||||
try:
|
||||
pkt = await ws.wait()
|
||||
except IOError: # pragma: no cover
|
||||
return
|
||||
decoded_pkt = packet.Packet(encoded_packet=pkt)
|
||||
if decoded_pkt.packet_type != packet.PING or \
|
||||
decoded_pkt.data != 'probe':
|
||||
self.server.logger.info(
|
||||
'%s: Failed websocket upgrade, no PING packet', self.sid)
|
||||
self.upgrading = False
|
||||
return
|
||||
await ws.send(packet.Packet(packet.PONG, data='probe').encode())
|
||||
await self.queue.put(packet.Packet(packet.NOOP)) # end poll
|
||||
|
||||
try:
|
||||
pkt = await ws.wait()
|
||||
except IOError: # pragma: no cover
|
||||
self.upgrading = False
|
||||
return
|
||||
decoded_pkt = packet.Packet(encoded_packet=pkt)
|
||||
if decoded_pkt.packet_type != packet.UPGRADE:
|
||||
self.upgraded = False
|
||||
self.server.logger.info(
|
||||
('%s: Failed websocket upgrade, expected UPGRADE packet, '
|
||||
'received %s instead.'),
|
||||
self.sid, pkt)
|
||||
self.upgrading = False
|
||||
return
|
||||
self.upgraded = True
|
||||
self.upgrading = False
|
||||
else:
|
||||
self.connected = True
|
||||
self.upgraded = True
|
||||
|
||||
# start separate writer thread
|
||||
async def writer():
|
||||
while True:
|
||||
packets = None
|
||||
try:
|
||||
packets = await self.poll()
|
||||
except exceptions.QueueEmpty:
|
||||
break
|
||||
if not packets:
|
||||
# empty packet list returned -> connection closed
|
||||
break
|
||||
try:
|
||||
for pkt in packets:
|
||||
await ws.send(pkt.encode())
|
||||
except:
|
||||
break
|
||||
writer_task = asyncio.ensure_future(writer())
|
||||
|
||||
self.server.logger.info(
|
||||
'%s: Upgrade to websocket successful', self.sid)
|
||||
|
||||
while True:
|
||||
p = None
|
||||
wait_task = asyncio.ensure_future(ws.wait())
|
||||
try:
|
||||
p = await asyncio.wait_for(
|
||||
wait_task,
|
||||
self.server.ping_interval + self.server.ping_timeout)
|
||||
except asyncio.CancelledError: # pragma: no cover
|
||||
# there is a bug (https://bugs.python.org/issue30508) in
|
||||
# asyncio that causes a "Task exception never retrieved" error
|
||||
# to appear when wait_task raises an exception before it gets
|
||||
# cancelled. Calling wait_task.exception() prevents the error
|
||||
# from being issued in Python 3.6, but causes other errors in
|
||||
# other versions, so we run it with all errors suppressed and
|
||||
# hope for the best.
|
||||
try:
|
||||
wait_task.exception()
|
||||
except:
|
||||
pass
|
||||
break
|
||||
except:
|
||||
break
|
||||
if p is None:
|
||||
# connection closed by client
|
||||
break
|
||||
pkt = packet.Packet(encoded_packet=p)
|
||||
try:
|
||||
await self.receive(pkt)
|
||||
except exceptions.UnknownPacketError: # pragma: no cover
|
||||
pass
|
||||
except exceptions.SocketIsClosedError: # pragma: no cover
|
||||
self.server.logger.info('Receive error -- socket is closed')
|
||||
break
|
||||
except: # pragma: no cover
|
||||
# if we get an unexpected exception we log the error and exit
|
||||
# the connection properly
|
||||
self.server.logger.exception('Unknown receive error')
|
||||
|
||||
await self.queue.put(None) # unlock the writer task so it can exit
|
||||
await asyncio.wait_for(writer_task, timeout=None)
|
||||
await self.close(wait=False, abort=True)
|
706
matteo_env/Lib/site-packages/engineio/client.py
Normal file
706
matteo_env/Lib/site-packages/engineio/client.py
Normal file
|
@ -0,0 +1,706 @@
|
|||
from base64 import b64encode
|
||||
from json import JSONDecodeError
|
||||
import logging
|
||||
try:
|
||||
import queue
|
||||
except ImportError: # pragma: no cover
|
||||
import Queue as queue
|
||||
import signal
|
||||
import ssl
|
||||
import threading
|
||||
import time
|
||||
import urllib
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError: # pragma: no cover
|
||||
requests = None
|
||||
try:
|
||||
import websocket
|
||||
except ImportError: # pragma: no cover
|
||||
websocket = None
|
||||
from . import exceptions
|
||||
from . import packet
|
||||
from . import payload
|
||||
|
||||
default_logger = logging.getLogger('engineio.client')
|
||||
connected_clients = []
|
||||
|
||||
|
||||
def signal_handler(sig, frame):
|
||||
"""SIGINT handler.
|
||||
|
||||
Disconnect all active clients and then invoke the original signal handler.
|
||||
"""
|
||||
for client in connected_clients[:]:
|
||||
if not client.is_asyncio_based():
|
||||
client.disconnect()
|
||||
if callable(original_signal_handler):
|
||||
return original_signal_handler(sig, frame)
|
||||
else: # pragma: no cover
|
||||
# Handle case where no original SIGINT handler was present.
|
||||
return signal.default_int_handler(sig, frame)
|
||||
|
||||
|
||||
original_signal_handler = None
|
||||
|
||||
|
||||
class Client(object):
|
||||
"""An Engine.IO client.
|
||||
|
||||
This class implements a fully compliant Engine.IO web client with support
|
||||
for websocket and long-polling transports.
|
||||
|
||||
:param logger: To enable logging set to ``True`` or pass a logger object to
|
||||
use. To disable logging set to ``False``. The default is
|
||||
``False``. Note that fatal errors are logged even when
|
||||
``logger`` is ``False``.
|
||||
:param json: An alternative json module to use for encoding and decoding
|
||||
packets. Custom json modules must have ``dumps`` and ``loads``
|
||||
functions that are compatible with the standard library
|
||||
versions.
|
||||
:param request_timeout: A timeout in seconds for requests. The default is
|
||||
5 seconds.
|
||||
:param http_session: an initialized ``requests.Session`` object to be used
|
||||
when sending requests to the server. Use it if you
|
||||
need to add special client options such as proxy
|
||||
servers, SSL certificates, etc.
|
||||
:param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to
|
||||
skip SSL certificate verification, allowing
|
||||
connections to servers with self signed certificates.
|
||||
The default is ``True``.
|
||||
"""
|
||||
event_names = ['connect', 'disconnect', 'message']
|
||||
|
||||
def __init__(self,
|
||||
logger=False,
|
||||
json=None,
|
||||
request_timeout=5,
|
||||
http_session=None,
|
||||
ssl_verify=True):
|
||||
global original_signal_handler
|
||||
if original_signal_handler is None and \
|
||||
threading.current_thread() == threading.main_thread():
|
||||
original_signal_handler = signal.signal(signal.SIGINT,
|
||||
signal_handler)
|
||||
self.handlers = {}
|
||||
self.base_url = None
|
||||
self.transports = None
|
||||
self.current_transport = None
|
||||
self.sid = None
|
||||
self.upgrades = None
|
||||
self.ping_interval = None
|
||||
self.ping_timeout = None
|
||||
self.http = http_session
|
||||
self.ws = None
|
||||
self.read_loop_task = None
|
||||
self.write_loop_task = None
|
||||
self.queue = None
|
||||
self.state = 'disconnected'
|
||||
self.ssl_verify = ssl_verify
|
||||
|
||||
if json is not None:
|
||||
packet.Packet.json = json
|
||||
if not isinstance(logger, bool):
|
||||
self.logger = logger
|
||||
else:
|
||||
self.logger = default_logger
|
||||
if self.logger.level == logging.NOTSET:
|
||||
if logger:
|
||||
self.logger.setLevel(logging.INFO)
|
||||
else:
|
||||
self.logger.setLevel(logging.ERROR)
|
||||
self.logger.addHandler(logging.StreamHandler())
|
||||
|
||||
self.request_timeout = request_timeout
|
||||
|
||||
def is_asyncio_based(self):
|
||||
return False
|
||||
|
||||
def on(self, event, handler=None):
|
||||
"""Register an event handler.
|
||||
|
||||
:param event: The event name. Can be ``'connect'``, ``'message'`` or
|
||||
``'disconnect'``.
|
||||
:param handler: The function that should be invoked to handle the
|
||||
event. When this parameter is not given, the method
|
||||
acts as a decorator for the handler function.
|
||||
|
||||
Example usage::
|
||||
|
||||
# as a decorator:
|
||||
@eio.on('connect')
|
||||
def connect_handler():
|
||||
print('Connection request')
|
||||
|
||||
# as a method:
|
||||
def message_handler(msg):
|
||||
print('Received message: ', msg)
|
||||
eio.send('response')
|
||||
eio.on('message', message_handler)
|
||||
"""
|
||||
if event not in self.event_names:
|
||||
raise ValueError('Invalid event')
|
||||
|
||||
def set_handler(handler):
|
||||
self.handlers[event] = handler
|
||||
return handler
|
||||
|
||||
if handler is None:
|
||||
return set_handler
|
||||
set_handler(handler)
|
||||
|
||||
def connect(self, url, headers=None, transports=None,
|
||||
engineio_path='engine.io'):
|
||||
"""Connect to an Engine.IO server.
|
||||
|
||||
:param url: The URL of the Engine.IO server. It can include custom
|
||||
query string parameters if required by the server.
|
||||
:param headers: A dictionary with custom headers to send with the
|
||||
connection request.
|
||||
:param transports: The list of allowed transports. Valid transports
|
||||
are ``'polling'`` and ``'websocket'``. If not
|
||||
given, the polling transport is connected first,
|
||||
then an upgrade to websocket is attempted.
|
||||
:param engineio_path: The endpoint where the Engine.IO server is
|
||||
installed. The default value is appropriate for
|
||||
most cases.
|
||||
|
||||
Example usage::
|
||||
|
||||
eio = engineio.Client()
|
||||
eio.connect('http://localhost:5000')
|
||||
"""
|
||||
if self.state != 'disconnected':
|
||||
raise ValueError('Client is not in a disconnected state')
|
||||
valid_transports = ['polling', 'websocket']
|
||||
if transports is not None:
|
||||
if isinstance(transports, str):
|
||||
transports = [transports]
|
||||
transports = [transport for transport in transports
|
||||
if transport in valid_transports]
|
||||
if not transports:
|
||||
raise ValueError('No valid transports provided')
|
||||
self.transports = transports or valid_transports
|
||||
self.queue = self.create_queue()
|
||||
return getattr(self, '_connect_' + self.transports[0])(
|
||||
url, headers or {}, engineio_path)
|
||||
|
||||
def wait(self):
|
||||
"""Wait until the connection with the server ends.
|
||||
|
||||
Client applications can use this function to block the main thread
|
||||
during the life of the connection.
|
||||
"""
|
||||
if self.read_loop_task:
|
||||
self.read_loop_task.join()
|
||||
|
||||
def send(self, data):
|
||||
"""Send a message to a client.
|
||||
|
||||
:param data: The data to send to the client. Data can be of type
|
||||
``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
|
||||
or ``dict``, the data will be serialized as JSON.
|
||||
"""
|
||||
self._send_packet(packet.Packet(packet.MESSAGE, data=data))
|
||||
|
||||
def disconnect(self, abort=False):
|
||||
"""Disconnect from the server.
|
||||
|
||||
:param abort: If set to ``True``, do not wait for background tasks
|
||||
associated with the connection to end.
|
||||
"""
|
||||
if self.state == 'connected':
|
||||
self._send_packet(packet.Packet(packet.CLOSE))
|
||||
self.queue.put(None)
|
||||
self.state = 'disconnecting'
|
||||
self._trigger_event('disconnect', run_async=False)
|
||||
if self.current_transport == 'websocket':
|
||||
self.ws.close()
|
||||
if not abort:
|
||||
self.read_loop_task.join()
|
||||
self.state = 'disconnected'
|
||||
try:
|
||||
connected_clients.remove(self)
|
||||
except ValueError: # pragma: no cover
|
||||
pass
|
||||
self._reset()
|
||||
|
||||
def transport(self):
|
||||
"""Return the name of the transport currently in use.
|
||||
|
||||
The possible values returned by this function are ``'polling'`` and
|
||||
``'websocket'``.
|
||||
"""
|
||||
return self.current_transport
|
||||
|
||||
def start_background_task(self, target, *args, **kwargs):
|
||||
"""Start a background task.
|
||||
|
||||
This is a utility function that applications can use to start a
|
||||
background task.
|
||||
|
||||
:param target: the target function to execute.
|
||||
:param args: arguments to pass to the function.
|
||||
:param kwargs: keyword arguments to pass to the function.
|
||||
|
||||
This function returns an object compatible with the `Thread` class in
|
||||
the Python standard library. The `start()` method on this object is
|
||||
already called by this function.
|
||||
"""
|
||||
th = threading.Thread(target=target, args=args, kwargs=kwargs)
|
||||
th.start()
|
||||
return th
|
||||
|
||||
def sleep(self, seconds=0):
|
||||
"""Sleep for the requested amount of time."""
|
||||
return time.sleep(seconds)
|
||||
|
||||
def create_queue(self, *args, **kwargs):
|
||||
"""Create a queue object."""
|
||||
q = queue.Queue(*args, **kwargs)
|
||||
q.Empty = queue.Empty
|
||||
return q
|
||||
|
||||
def create_event(self, *args, **kwargs):
|
||||
"""Create an event object."""
|
||||
return threading.Event(*args, **kwargs)
|
||||
|
||||
def _reset(self):
|
||||
self.state = 'disconnected'
|
||||
self.sid = None
|
||||
|
||||
def _connect_polling(self, url, headers, engineio_path):
|
||||
"""Establish a long-polling connection to the Engine.IO server."""
|
||||
if requests is None: # pragma: no cover
|
||||
# not installed
|
||||
self.logger.error('requests package is not installed -- cannot '
|
||||
'send HTTP requests!')
|
||||
return
|
||||
self.base_url = self._get_engineio_url(url, engineio_path, 'polling')
|
||||
self.logger.info('Attempting polling connection to ' + self.base_url)
|
||||
r = self._send_request(
|
||||
'GET', self.base_url + self._get_url_timestamp(), headers=headers,
|
||||
timeout=self.request_timeout)
|
||||
if r is None:
|
||||
self._reset()
|
||||
raise exceptions.ConnectionError(
|
||||
'Connection refused by the server')
|
||||
if r.status_code < 200 or r.status_code >= 300:
|
||||
self._reset()
|
||||
try:
|
||||
arg = r.json()
|
||||
except JSONDecodeError:
|
||||
arg = None
|
||||
raise exceptions.ConnectionError(
|
||||
'Unexpected status code {} in server response'.format(
|
||||
r.status_code), arg)
|
||||
try:
|
||||
p = payload.Payload(encoded_payload=r.content.decode('utf-8'))
|
||||
except ValueError:
|
||||
raise exceptions.ConnectionError(
|
||||
'Unexpected response from server') from None
|
||||
open_packet = p.packets[0]
|
||||
if open_packet.packet_type != packet.OPEN:
|
||||
raise exceptions.ConnectionError(
|
||||
'OPEN packet not returned by server')
|
||||
self.logger.info(
|
||||
'Polling connection accepted with ' + str(open_packet.data))
|
||||
self.sid = open_packet.data['sid']
|
||||
self.upgrades = open_packet.data['upgrades']
|
||||
self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
|
||||
self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
|
||||
self.current_transport = 'polling'
|
||||
self.base_url += '&sid=' + self.sid
|
||||
|
||||
self.state = 'connected'
|
||||
connected_clients.append(self)
|
||||
self._trigger_event('connect', run_async=False)
|
||||
|
||||
for pkt in p.packets[1:]:
|
||||
self._receive_packet(pkt)
|
||||
|
||||
if 'websocket' in self.upgrades and 'websocket' in self.transports:
|
||||
# attempt to upgrade to websocket
|
||||
if self._connect_websocket(url, headers, engineio_path):
|
||||
# upgrade to websocket succeeded, we're done here
|
||||
return
|
||||
|
||||
# start background tasks associated with this client
|
||||
self.write_loop_task = self.start_background_task(self._write_loop)
|
||||
self.read_loop_task = self.start_background_task(
|
||||
self._read_loop_polling)
|
||||
|
||||
def _connect_websocket(self, url, headers, engineio_path):
|
||||
"""Establish or upgrade to a WebSocket connection with the server."""
|
||||
if websocket is None: # pragma: no cover
|
||||
# not installed
|
||||
self.logger.warning('websocket-client package not installed, only '
|
||||
'polling transport is available')
|
||||
return False
|
||||
websocket_url = self._get_engineio_url(url, engineio_path, 'websocket')
|
||||
if self.sid:
|
||||
self.logger.info(
|
||||
'Attempting WebSocket upgrade to ' + websocket_url)
|
||||
upgrade = True
|
||||
websocket_url += '&sid=' + self.sid
|
||||
else:
|
||||
upgrade = False
|
||||
self.base_url = websocket_url
|
||||
self.logger.info(
|
||||
'Attempting WebSocket connection to ' + websocket_url)
|
||||
|
||||
# get cookies and other settings from the long-polling connection
|
||||
# so that they are preserved when connecting to the WebSocket route
|
||||
cookies = None
|
||||
extra_options = {}
|
||||
if self.http:
|
||||
# cookies
|
||||
cookies = '; '.join(["{}={}".format(cookie.name, cookie.value)
|
||||
for cookie in self.http.cookies])
|
||||
for header, value in headers.items():
|
||||
if header.lower() == 'cookie':
|
||||
if cookies:
|
||||
cookies += '; '
|
||||
cookies += value
|
||||
del headers[header]
|
||||
break
|
||||
|
||||
# auth
|
||||
if 'Authorization' not in headers and self.http.auth is not None:
|
||||
if not isinstance(self.http.auth, tuple): # pragma: no cover
|
||||
raise ValueError('Only basic authentication is supported')
|
||||
basic_auth = '{}:{}'.format(
|
||||
self.http.auth[0], self.http.auth[1]).encode('utf-8')
|
||||
basic_auth = b64encode(basic_auth).decode('utf-8')
|
||||
headers['Authorization'] = 'Basic ' + basic_auth
|
||||
|
||||
# cert
|
||||
# this can be given as ('certfile', 'keyfile') or just 'certfile'
|
||||
if isinstance(self.http.cert, tuple):
|
||||
extra_options['sslopt'] = {
|
||||
'certfile': self.http.cert[0],
|
||||
'keyfile': self.http.cert[1]}
|
||||
elif self.http.cert:
|
||||
extra_options['sslopt'] = {'certfile': self.http.cert}
|
||||
|
||||
# proxies
|
||||
if self.http.proxies:
|
||||
proxy_url = None
|
||||
if websocket_url.startswith('ws://'):
|
||||
proxy_url = self.http.proxies.get(
|
||||
'ws', self.http.proxies.get('http'))
|
||||
else: # wss://
|
||||
proxy_url = self.http.proxies.get(
|
||||
'wss', self.http.proxies.get('https'))
|
||||
if proxy_url:
|
||||
parsed_url = urllib.parse.urlparse(
|
||||
proxy_url if '://' in proxy_url
|
||||
else 'scheme://' + proxy_url)
|
||||
extra_options['http_proxy_host'] = parsed_url.hostname
|
||||
extra_options['http_proxy_port'] = parsed_url.port
|
||||
extra_options['http_proxy_auth'] = (
|
||||
(parsed_url.username, parsed_url.password)
|
||||
if parsed_url.username or parsed_url.password
|
||||
else None)
|
||||
|
||||
# verify
|
||||
if not self.http.verify:
|
||||
self.ssl_verify = False
|
||||
|
||||
if not self.ssl_verify:
|
||||
extra_options['sslopt'] = {"cert_reqs": ssl.CERT_NONE}
|
||||
try:
|
||||
ws = websocket.create_connection(
|
||||
websocket_url + self._get_url_timestamp(), header=headers,
|
||||
cookie=cookies, enable_multithread=True, **extra_options)
|
||||
except (ConnectionError, IOError, websocket.WebSocketException):
|
||||
if upgrade:
|
||||
self.logger.warning(
|
||||
'WebSocket upgrade failed: connection error')
|
||||
return False
|
||||
else:
|
||||
raise exceptions.ConnectionError('Connection error')
|
||||
if upgrade:
|
||||
p = packet.Packet(packet.PING, data='probe').encode()
|
||||
try:
|
||||
ws.send(p)
|
||||
except Exception as e: # pragma: no cover
|
||||
self.logger.warning(
|
||||
'WebSocket upgrade failed: unexpected send exception: %s',
|
||||
str(e))
|
||||
return False
|
||||
try:
|
||||
p = ws.recv()
|
||||
except Exception as e: # pragma: no cover
|
||||
self.logger.warning(
|
||||
'WebSocket upgrade failed: unexpected recv exception: %s',
|
||||
str(e))
|
||||
return False
|
||||
pkt = packet.Packet(encoded_packet=p)
|
||||
if pkt.packet_type != packet.PONG or pkt.data != 'probe':
|
||||
self.logger.warning(
|
||||
'WebSocket upgrade failed: no PONG packet')
|
||||
return False
|
||||
p = packet.Packet(packet.UPGRADE).encode()
|
||||
try:
|
||||
ws.send(p)
|
||||
except Exception as e: # pragma: no cover
|
||||
self.logger.warning(
|
||||
'WebSocket upgrade failed: unexpected send exception: %s',
|
||||
str(e))
|
||||
return False
|
||||
self.current_transport = 'websocket'
|
||||
self.logger.info('WebSocket upgrade was successful')
|
||||
else:
|
||||
try:
|
||||
p = ws.recv()
|
||||
except Exception as e: # pragma: no cover
|
||||
raise exceptions.ConnectionError(
|
||||
'Unexpected recv exception: ' + str(e))
|
||||
open_packet = packet.Packet(encoded_packet=p)
|
||||
if open_packet.packet_type != packet.OPEN:
|
||||
raise exceptions.ConnectionError('no OPEN packet')
|
||||
self.logger.info(
|
||||
'WebSocket connection accepted with ' + str(open_packet.data))
|
||||
self.sid = open_packet.data['sid']
|
||||
self.upgrades = open_packet.data['upgrades']
|
||||
self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
|
||||
self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
|
||||
self.current_transport = 'websocket'
|
||||
|
||||
self.state = 'connected'
|
||||
connected_clients.append(self)
|
||||
self._trigger_event('connect', run_async=False)
|
||||
self.ws = ws
|
||||
self.ws.settimeout(self.ping_interval + self.ping_timeout)
|
||||
|
||||
# start background tasks associated with this client
|
||||
self.write_loop_task = self.start_background_task(self._write_loop)
|
||||
self.read_loop_task = self.start_background_task(
|
||||
self._read_loop_websocket)
|
||||
return True
|
||||
|
||||
def _receive_packet(self, pkt):
|
||||
"""Handle incoming packets from the server."""
|
||||
packet_name = packet.packet_names[pkt.packet_type] \
|
||||
if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
|
||||
self.logger.info(
|
||||
'Received packet %s data %s', packet_name,
|
||||
pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
|
||||
if pkt.packet_type == packet.MESSAGE:
|
||||
self._trigger_event('message', pkt.data, run_async=True)
|
||||
elif pkt.packet_type == packet.PING:
|
||||
self._send_packet(packet.Packet(packet.PONG, pkt.data))
|
||||
elif pkt.packet_type == packet.CLOSE:
|
||||
self.disconnect(abort=True)
|
||||
elif pkt.packet_type == packet.NOOP:
|
||||
pass
|
||||
else:
|
||||
self.logger.error('Received unexpected packet of type %s',
|
||||
pkt.packet_type)
|
||||
|
||||
def _send_packet(self, pkt):
|
||||
"""Queue a packet to be sent to the server."""
|
||||
if self.state != 'connected':
|
||||
return
|
||||
self.queue.put(pkt)
|
||||
self.logger.info(
|
||||
'Sending packet %s data %s',
|
||||
packet.packet_names[pkt.packet_type],
|
||||
pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
|
||||
|
||||
def _send_request(
|
||||
self, method, url, headers=None, body=None,
|
||||
timeout=None): # pragma: no cover
|
||||
if self.http is None:
|
||||
self.http = requests.Session()
|
||||
try:
|
||||
return self.http.request(method, url, headers=headers, data=body,
|
||||
timeout=timeout, verify=self.ssl_verify)
|
||||
except requests.exceptions.RequestException as exc:
|
||||
self.logger.info('HTTP %s request to %s failed with error %s.',
|
||||
method, url, exc)
|
||||
|
||||
def _trigger_event(self, event, *args, **kwargs):
|
||||
"""Invoke an event handler."""
|
||||
run_async = kwargs.pop('run_async', False)
|
||||
if event in self.handlers:
|
||||
if run_async:
|
||||
return self.start_background_task(self.handlers[event], *args)
|
||||
else:
|
||||
try:
|
||||
return self.handlers[event](*args)
|
||||
except:
|
||||
self.logger.exception(event + ' handler error')
|
||||
|
||||
def _get_engineio_url(self, url, engineio_path, transport):
|
||||
"""Generate the Engine.IO connection URL."""
|
||||
engineio_path = engineio_path.strip('/')
|
||||
parsed_url = urllib.parse.urlparse(url)
|
||||
|
||||
if transport == 'polling':
|
||||
scheme = 'http'
|
||||
elif transport == 'websocket':
|
||||
scheme = 'ws'
|
||||
else: # pragma: no cover
|
||||
raise ValueError('invalid transport')
|
||||
if parsed_url.scheme in ['https', 'wss']:
|
||||
scheme += 's'
|
||||
|
||||
return ('{scheme}://{netloc}/{path}/?{query}'
|
||||
'{sep}transport={transport}&EIO=4').format(
|
||||
scheme=scheme, netloc=parsed_url.netloc,
|
||||
path=engineio_path, query=parsed_url.query,
|
||||
sep='&' if parsed_url.query else '',
|
||||
transport=transport)
|
||||
|
||||
def _get_url_timestamp(self):
|
||||
"""Generate the Engine.IO query string timestamp."""
|
||||
return '&t=' + str(time.time())
|
||||
|
||||
def _read_loop_polling(self):
|
||||
"""Read packets by polling the Engine.IO server."""
|
||||
while self.state == 'connected':
|
||||
self.logger.info(
|
||||
'Sending polling GET request to ' + self.base_url)
|
||||
r = self._send_request(
|
||||
'GET', self.base_url + self._get_url_timestamp(),
|
||||
timeout=max(self.ping_interval, self.ping_timeout) + 5)
|
||||
if r is None:
|
||||
self.logger.warning(
|
||||
'Connection refused by the server, aborting')
|
||||
self.queue.put(None)
|
||||
break
|
||||
if r.status_code < 200 or r.status_code >= 300:
|
||||
self.logger.warning('Unexpected status code %s in server '
|
||||
'response, aborting', r.status_code)
|
||||
self.queue.put(None)
|
||||
break
|
||||
try:
|
||||
p = payload.Payload(encoded_payload=r.content.decode('utf-8'))
|
||||
except ValueError:
|
||||
self.logger.warning(
|
||||
'Unexpected packet from server, aborting')
|
||||
self.queue.put(None)
|
||||
break
|
||||
for pkt in p.packets:
|
||||
self._receive_packet(pkt)
|
||||
|
||||
self.logger.info('Waiting for write loop task to end')
|
||||
self.write_loop_task.join()
|
||||
if self.state == 'connected':
|
||||
self._trigger_event('disconnect', run_async=False)
|
||||
try:
|
||||
connected_clients.remove(self)
|
||||
except ValueError: # pragma: no cover
|
||||
pass
|
||||
self._reset()
|
||||
self.logger.info('Exiting read loop task')
|
||||
|
||||
def _read_loop_websocket(self):
|
||||
"""Read packets from the Engine.IO WebSocket connection."""
|
||||
while self.state == 'connected':
|
||||
p = None
|
||||
try:
|
||||
p = self.ws.recv()
|
||||
except websocket.WebSocketTimeoutException:
|
||||
self.logger.warning(
|
||||
'Server has stopped communicating, aborting')
|
||||
self.queue.put(None)
|
||||
break
|
||||
except websocket.WebSocketConnectionClosedException:
|
||||
self.logger.warning(
|
||||
'WebSocket connection was closed, aborting')
|
||||
self.queue.put(None)
|
||||
break
|
||||
except Exception as e:
|
||||
self.logger.info(
|
||||
'Unexpected error receiving packet: "%s", aborting',
|
||||
str(e))
|
||||
self.queue.put(None)
|
||||
break
|
||||
try:
|
||||
pkt = packet.Packet(encoded_packet=p)
|
||||
except Exception as e: # pragma: no cover
|
||||
self.logger.info(
|
||||
'Unexpected error decoding packet: "%s", aborting', str(e))
|
||||
self.queue.put(None)
|
||||
break
|
||||
self._receive_packet(pkt)
|
||||
|
||||
self.logger.info('Waiting for write loop task to end')
|
||||
self.write_loop_task.join()
|
||||
if self.state == 'connected':
|
||||
self._trigger_event('disconnect', run_async=False)
|
||||
try:
|
||||
connected_clients.remove(self)
|
||||
except ValueError: # pragma: no cover
|
||||
pass
|
||||
self._reset()
|
||||
self.logger.info('Exiting read loop task')
|
||||
|
||||
def _write_loop(self):
|
||||
"""This background task sends packages to the server as they are
|
||||
pushed to the send queue.
|
||||
"""
|
||||
while self.state == 'connected':
|
||||
# to simplify the timeout handling, use the maximum of the
|
||||
# ping interval and ping timeout as timeout, with an extra 5
|
||||
# seconds grace period
|
||||
timeout = max(self.ping_interval, self.ping_timeout) + 5
|
||||
packets = None
|
||||
try:
|
||||
packets = [self.queue.get(timeout=timeout)]
|
||||
except self.queue.Empty:
|
||||
self.logger.error('packet queue is empty, aborting')
|
||||
break
|
||||
if packets == [None]:
|
||||
self.queue.task_done()
|
||||
packets = []
|
||||
else:
|
||||
while True:
|
||||
try:
|
||||
packets.append(self.queue.get(block=False))
|
||||
except self.queue.Empty:
|
||||
break
|
||||
if packets[-1] is None:
|
||||
packets = packets[:-1]
|
||||
self.queue.task_done()
|
||||
break
|
||||
if not packets:
|
||||
# empty packet list returned -> connection closed
|
||||
break
|
||||
if self.current_transport == 'polling':
|
||||
p = payload.Payload(packets=packets)
|
||||
r = self._send_request(
|
||||
'POST', self.base_url, body=p.encode(),
|
||||
headers={'Content-Type': 'application/octet-stream'},
|
||||
timeout=self.request_timeout)
|
||||
for pkt in packets:
|
||||
self.queue.task_done()
|
||||
if r is None:
|
||||
self.logger.warning(
|
||||
'Connection refused by the server, aborting')
|
||||
break
|
||||
if r.status_code < 200 or r.status_code >= 300:
|
||||
self.logger.warning('Unexpected status code %s in server '
|
||||
'response, aborting', r.status_code)
|
||||
self._reset()
|
||||
break
|
||||
else:
|
||||
# websocket
|
||||
try:
|
||||
for pkt in packets:
|
||||
encoded_packet = pkt.encode()
|
||||
if pkt.binary:
|
||||
self.ws.send_binary(encoded_packet)
|
||||
else:
|
||||
self.ws.send(encoded_packet)
|
||||
self.queue.task_done()
|
||||
except (websocket.WebSocketConnectionClosedException,
|
||||
BrokenPipeError, OSError):
|
||||
self.logger.warning(
|
||||
'WebSocket connection was closed, aborting')
|
||||
break
|
||||
self.logger.info('Exiting write loop task')
|
22
matteo_env/Lib/site-packages/engineio/exceptions.py
Normal file
22
matteo_env/Lib/site-packages/engineio/exceptions.py
Normal file
|
@ -0,0 +1,22 @@
|
|||
class EngineIOError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class ContentTooLongError(EngineIOError):
|
||||
pass
|
||||
|
||||
|
||||
class UnknownPacketError(EngineIOError):
|
||||
pass
|
||||
|
||||
|
||||
class QueueEmpty(EngineIOError):
|
||||
pass
|
||||
|
||||
|
||||
class SocketIsClosedError(EngineIOError):
|
||||
pass
|
||||
|
||||
|
||||
class ConnectionError(EngineIOError):
|
||||
pass
|
87
matteo_env/Lib/site-packages/engineio/middleware.py
Normal file
87
matteo_env/Lib/site-packages/engineio/middleware.py
Normal file
|
@ -0,0 +1,87 @@
|
|||
import os
|
||||
from engineio.static_files import get_static_file
|
||||
|
||||
|
||||
class WSGIApp(object):
|
||||
"""WSGI application middleware for Engine.IO.
|
||||
|
||||
This middleware dispatches traffic to an Engine.IO application. It can
|
||||
also serve a list of static files to the client, or forward unrelated
|
||||
HTTP traffic to another WSGI application.
|
||||
|
||||
:param engineio_app: The Engine.IO server. Must be an instance of the
|
||||
``engineio.Server`` class.
|
||||
:param wsgi_app: The WSGI app that receives all other traffic.
|
||||
:param static_files: A dictionary with static file mapping rules. See the
|
||||
documentation for details on this argument.
|
||||
:param engineio_path: The endpoint where the Engine.IO application should
|
||||
be installed. The default value is appropriate for
|
||||
most cases.
|
||||
|
||||
Example usage::
|
||||
|
||||
import engineio
|
||||
import eventlet
|
||||
|
||||
eio = engineio.Server()
|
||||
app = engineio.WSGIApp(eio, static_files={
|
||||
'/': {'content_type': 'text/html', 'filename': 'index.html'},
|
||||
'/index.html': {'content_type': 'text/html',
|
||||
'filename': 'index.html'},
|
||||
})
|
||||
eventlet.wsgi.server(eventlet.listen(('', 8000)), app)
|
||||
"""
|
||||
def __init__(self, engineio_app, wsgi_app=None, static_files=None,
|
||||
engineio_path='engine.io'):
|
||||
self.engineio_app = engineio_app
|
||||
self.wsgi_app = wsgi_app
|
||||
self.engineio_path = engineio_path.strip('/')
|
||||
self.static_files = static_files or {}
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
if 'gunicorn.socket' in environ:
|
||||
# gunicorn saves the socket under environ['gunicorn.socket'], while
|
||||
# eventlet saves it under environ['eventlet.input']. Eventlet also
|
||||
# stores the socket inside a wrapper class, while gunicon writes it
|
||||
# directly into the environment. To give eventlet's WebSocket
|
||||
# module access to this socket when running under gunicorn, here we
|
||||
# copy the socket to the eventlet format.
|
||||
class Input(object):
|
||||
def __init__(self, socket):
|
||||
self.socket = socket
|
||||
|
||||
def get_socket(self):
|
||||
return self.socket
|
||||
|
||||
environ['eventlet.input'] = Input(environ['gunicorn.socket'])
|
||||
path = environ['PATH_INFO']
|
||||
if path is not None and \
|
||||
path.startswith('/{0}/'.format(self.engineio_path)):
|
||||
return self.engineio_app.handle_request(environ, start_response)
|
||||
else:
|
||||
static_file = get_static_file(path, self.static_files) \
|
||||
if self.static_files else None
|
||||
if static_file:
|
||||
if os.path.exists(static_file['filename']):
|
||||
start_response(
|
||||
'200 OK',
|
||||
[('Content-Type', static_file['content_type'])])
|
||||
with open(static_file['filename'], 'rb') as f:
|
||||
return [f.read()]
|
||||
else:
|
||||
return self.not_found(start_response)
|
||||
elif self.wsgi_app is not None:
|
||||
return self.wsgi_app(environ, start_response)
|
||||
return self.not_found(start_response)
|
||||
|
||||
def not_found(self, start_response):
|
||||
start_response("404 Not Found", [('Content-Type', 'text/plain')])
|
||||
return [b'Not Found']
|
||||
|
||||
|
||||
class Middleware(WSGIApp):
|
||||
"""This class has been renamed to ``WSGIApp`` and is now deprecated."""
|
||||
def __init__(self, engineio_app, wsgi_app=None,
|
||||
engineio_path='engine.io'):
|
||||
super(Middleware, self).__init__(engineio_app, wsgi_app,
|
||||
engineio_path=engineio_path)
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user