added ;saveteam, ;showteam, ;showplayer

parent 2ec0efefc9
commit ab8151206b
44 database.py
@@ -58,12 +58,20 @@ def initialcheck():
                            strikeouts_taken integer DEFAULT 0
                        );"""

    teams_table_check_string = """ CREATE TABLE IF NOT EXISTS teams (
                            counter integer PRIMARY KEY,
                            name text NOT NULL,
                            team_json_string text NOT NULL,
                            timestamp text NOT NULL
                        ); """

    if conn is not None:
        c = conn.cursor()
        c.execute(soulscream_table_check_string)
        c.execute(player_cache_table_check_string)
        c.execute(player_table_check_string)
        c.execute(player_stats_table_check_string)
        c.execute(teams_table_check_string)

        conn.commit()
    conn.close()

@@ -163,7 +171,7 @@ def designate_player(user, player_json):
    conn.close()

def get_user_player_conn(conn, user):
    #try:
    try:
        if conn is not None:
            c = conn.cursor()
            c.execute("SELECT player_json_string FROM user_designated_players WHERE user_id=?", (user.id,))

@@ -173,11 +181,41 @@ def get_user_player_conn(conn, user):
            return False
        else:
            print(conn)
    #except:
        #print(conn)
    except:
        print(conn)

def get_user_player(user):
    conn = create_connection()
    player = get_user_player_conn(conn, user)
    conn.close()
    return player

def save_team(name, team_json_string):
    conn = create_connection()
    try:
        if conn is not None:
            c = conn.cursor()
            store_string = """ INSERT INTO teams(name, team_json_string, timestamp)
                                VALUES (?,?, ?) """
            c.execute(store_string, (name, team_json_string, datetime.datetime.now(datetime.timezone.utc)))
            conn.commit()
            conn.close()
            return True
        conn.close()
        return False
    except:
        return False

def get_team(name):
    conn = create_connection()
    if conn is not None:
        c = conn.cursor()
        c.execute("SELECT * FROM teams WHERE name=?", (name,))
        team = c.fetchone()

        conn.close()
        print(team[2])
        return team[2] #returns a json string

    conn.close()
    return None
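The new commands land here as two thin helpers: save_team() stores a (name, team_json_string, timestamp) row and get_team() returns the stored JSON string. A minimal, self-contained sketch of that round trip, using an in-memory database and the table DDL from the hunk above (the real module goes through its create_connection() helper instead)::

    import sqlite3, datetime

    conn = sqlite3.connect(":memory:")
    conn.execute("""CREATE TABLE IF NOT EXISTS teams (
            counter integer PRIMARY KEY,
            name text NOT NULL,
            team_json_string text NOT NULL,
            timestamp text NOT NULL
        )""")
    conn.execute("INSERT INTO teams(name, team_json_string, timestamp) VALUES (?, ?, ?)",
                 ("Arizona Aways", '{"name": "Arizona Aways"}',
                  str(datetime.datetime.now(datetime.timezone.utc))))
    row = conn.execute("SELECT * FROM teams WHERE name=?", ("Arizona Aways",)).fetchone()
    print(row[2])  # the stored team_json_string, as get_team() returns it
    conn.close()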
@@ -96,3 +96,39 @@ def large_scale_debug(): #massive debug, goes in games.py
# there were {result[9]}, {result[10]}, and {result[11]} strikeouts, respectively.
# there were {result[12]}, {result[13]}, and {result[14]} groundouts, respectively.
# there were {result[15]}, {result[16]}, and {result[17]} flyouts, respectively.""")


def debug_game(): #returns a game object ready to run
    average_player = player('{"id" : "average", "name" : "AJ", "batting_stars" : 2.5, "pitching_stars" : 2.5, "defense_stars" : 2.5, "baserunning_stars" : 2.5}')
    average_player2 = player('{"id" : "average", "name" : "Astrid", "batting_stars" : 2.5, "pitching_stars" : 2.5, "defense_stars" : 2.5, "baserunning_stars" : 2.5}')
    average_player3 = player('{"id" : "average", "name" : "xvi", "batting_stars" : 2.5, "pitching_stars" : 2.5, "defense_stars" : 2.5, "baserunning_stars" : 2.5}')
    average_player4 = player('{"id" : "average", "name" : "Fox", "batting_stars" : 2.5, "pitching_stars" : 2.5, "defense_stars" : 2.5, "baserunning_stars" : 2.5}')
    average_player5 = player('{"id" : "average", "name" : "Pigeon", "batting_stars" : 2.5, "pitching_stars" : 2.5, "defense_stars" : 2.5, "baserunning_stars" : 2.5}')
    max_player = player('{"id" : "max", "name" : "max", "batting_stars" : 5, "pitching_stars" : 5, "defense_stars" : 5, "baserunning_stars" : 5}')
    min_player = player('{"id" : "min", "name" : "min", "batting_stars" : 1, "pitching_stars" : 1, "defense_stars" : 1, "baserunning_stars" : 1}')
    team_avg = team()
    team_avg.name = "Arizona Aways"
    team_avg.add_lineup(average_player)
    team_avg.add_lineup(average_player2)
    team_avg.add_lineup(average_player3)
    team_avg.add_lineup(average_player4)
    team_avg.set_pitcher(average_player5)
    team_avg.finalize()
    team_avg2 = team()
    team_avg2.name = "Houston Homes"
    team_avg2.add_lineup(average_player5)
    team_avg2.add_lineup(average_player4)
    team_avg2.add_lineup(average_player3)
    team_avg2.add_lineup(average_player2)
    team_avg2.set_pitcher(average_player)
    team_avg2.finalize()
    team_min = team()
    team_min.add_lineup(min_player)
    team_min.set_pitcher(min_player)
    team_min.finalize()

    average_game = game("test", team_avg, team_avg2)
    #slugging_game = game(team_max, team_min)
    #shutout_game = game(team_min, team_max)

    return average_game
61 games.py
@@ -1,4 +1,4 @@
import json, random, os, math
import json, random, os, math, jsonpickle
from enum import Enum
import database as db

@@ -82,6 +82,7 @@ class team(object):
        self.lineup_position = 0
        self.pitcher = None
        self.score = 0
        self.slogan = None

    def add_lineup(self, new_player):
        if len(self.lineup) <= 12:

@@ -97,6 +98,14 @@ class team(object):
    def is_ready(self):
        return (len(self.lineup) >= 1 and self.pitcher is not None)

    def prepare_for_save(self):
        self.lineup_position = 0
        self.score = 0
        for this_player in self.lineup:
            for stat in this_player.game_stats.keys():
                this_player.game_stats[stat] = 0
        return True

    def finalize(self):
        if self.is_ready():
            while len(self.lineup) <= 4:

@@ -124,6 +133,7 @@ class game(object):
        self.max_innings = config()["default_length"]
        self.bases = {1 : None, 2 : None, 3 : None}


    def get_batter(self):
        if self.top_of_inning:
            bat_team = self.teams["away"]

@@ -443,37 +453,20 @@ def random_star_gen(key, player):
# strikeouts_taken


def debug_game():
    average_player = player('{"id" : "average", "name" : "AJ", "batting_stars" : 2.5, "pitching_stars" : 2.5, "defense_stars" : 2.5, "baserunning_stars" : 2.5}')
    average_player2 = player('{"id" : "average", "name" : "Astrid", "batting_stars" : 2.5, "pitching_stars" : 2.5, "defense_stars" : 2.5, "baserunning_stars" : 2.5}')
    average_player3 = player('{"id" : "average", "name" : "xvi", "batting_stars" : 2.5, "pitching_stars" : 2.5, "defense_stars" : 2.5, "baserunning_stars" : 2.5}')
    average_player4 = player('{"id" : "average", "name" : "Fox", "batting_stars" : 2.5, "pitching_stars" : 2.5, "defense_stars" : 2.5, "baserunning_stars" : 2.5}')
    average_player5 = player('{"id" : "average", "name" : "Pigeon", "batting_stars" : 2.5, "pitching_stars" : 2.5, "defense_stars" : 2.5, "baserunning_stars" : 2.5}')
    max_player = player('{"id" : "max", "name" : "max", "batting_stars" : 5, "pitching_stars" : 5, "defense_stars" : 5, "baserunning_stars" : 5}')
    min_player = player('{"id" : "min", "name" : "min", "batting_stars" : 1, "pitching_stars" : 1, "defense_stars" : 1, "baserunning_stars" : 1}')
    team_avg = team()
    team_avg.name = "Arizona Aways"
    team_avg.add_lineup(average_player)
    team_avg.add_lineup(average_player2)
    team_avg.add_lineup(average_player3)
    team_avg.add_lineup(average_player4)
    team_avg.set_pitcher(average_player5)
    team_avg.finalize()
    team_avg2 = team()
    team_avg2.name = "Houston Homes"
    team_avg2.add_lineup(average_player5)
    team_avg2.add_lineup(average_player4)
    team_avg2.add_lineup(average_player3)
    team_avg2.add_lineup(average_player2)
    team_avg2.set_pitcher(average_player)
    team_avg2.finalize()
    team_min = team()
    team_min.add_lineup(min_player)
    team_min.set_pitcher(min_player)
    team_min.finalize()
def get_team(name):
    #try:
    team_json = jsonpickle.decode(db.get_team(name), keys=True, classes=team)
    if team_json is not None:
        return team_json
    return None
    # except:
        #return None

    average_game = game("test", team_avg, team_avg2)
    #slugging_game = game(team_max, team_min)
    #shutout_game = game(team_min, team_max)

    return average_game
def save_team(this_team):
    try:
        this_team.prepare_for_save()
        team_json_string = jsonpickle.encode(this_team, keys=True)
        db.save_team(this_team.name, team_json_string)
        return True
    except:
        return None
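The new games.save_team()/games.get_team() pair leans on jsonpickle's two-way conversion with keys=True. A small sketch of that round trip; the Demo class is illustrative, not the bot's team class::

    import jsonpickle

    class Demo(object):
        def __init__(self):
            self.lineup = []
            self.score = 0

    d = Demo()
    d.lineup.append("AJ")
    frozen = jsonpickle.encode(d, keys=True)       # JSON string, ready for the teams table
    thawed = jsonpickle.decode(frozen, keys=True)  # back to a Demo instance
    assert thawed.lineup == ["AJ"] and thawed.score == 0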
1 matteo_env/Lib/site-packages/jsonpickle-1.4.2.dist-info/INSTALLER
Normal file
@@ -0,0 +1 @@
pip
29 matteo_env/Lib/site-packages/jsonpickle-1.4.2.dist-info/LICENSE
Normal file
@@ -0,0 +1,29 @@
Copyright (C) 2008 John Paulett (john -at- paulett.org)
Copyright (C) 2009-2018 David Aguilar (davvid -at- gmail.com)
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in
   the documentation and/or other materials provided with the
   distribution.
3. The name of the author may not be used to endorse or promote
   products derived from this software without specific prior
   written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
178 matteo_env/Lib/site-packages/jsonpickle-1.4.2.dist-info/METADATA
Normal file
@@ -0,0 +1,178 @@
Metadata-Version: 2.1
Name: jsonpickle
Version: 1.4.2
Summary: Python library for serializing any arbitrary object graph into JSON
Home-page: https://github.com/jsonpickle/jsonpickle
Author: David Aguilar
Author-email: davvid@gmail.com
License: UNKNOWN
Keywords: json pickle,json,pickle,marshal,serialization,JavaScript Object Notation
Platform: POSIX
Platform: Windows
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: JavaScript
Classifier: Operating System :: OS Independent
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=2.7
Requires-Dist: importlib-metadata ; python_version < "3.8"
Provides-Extra: docs
Requires-Dist: sphinx ; extra == 'docs'
Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs'
Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
Provides-Extra: testing
Requires-Dist: coverage (<5) ; extra == 'testing'
Requires-Dist: pytest (!=3.7.3,>=3.5) ; extra == 'testing'
Requires-Dist: pytest-checkdocs (>=1.2.3) ; extra == 'testing'
Requires-Dist: pytest-flake8 ; extra == 'testing'
Requires-Dist: pytest-black-multipy ; extra == 'testing'
Requires-Dist: pytest-cov ; extra == 'testing'
Requires-Dist: ecdsa ; extra == 'testing'
Requires-Dist: feedparser ; extra == 'testing'
Requires-Dist: numpy ; extra == 'testing'
Requires-Dist: pandas ; extra == 'testing'
Requires-Dist: pymongo ; extra == 'testing'
Requires-Dist: sqlalchemy ; extra == 'testing'
Provides-Extra: testing.libs
Requires-Dist: demjson ; extra == 'testing.libs'
Requires-Dist: simplejson ; extra == 'testing.libs'
Requires-Dist: ujson ; extra == 'testing.libs'
Requires-Dist: yajl ; extra == 'testing.libs'
Requires-Dist: enum34 ; (python_version == "2.7") and extra == 'testing'
Requires-Dist: jsonlib ; (python_version == "2.7") and extra == 'testing'

.. image:: https://img.shields.io/pypi/v/jsonpickle.svg
   :target: `PyPI link`_

.. image:: https://img.shields.io/pypi/pyversions/jsonpickle.svg
   :target: `PyPI link`_

.. _PyPI link: https://pypi.org/project/jsonpickle

.. image:: https://dev.azure.com/jaraco/jsonpickle/_apis/build/status/jaraco.jsonpickle?branchName=master
   :target: https://dev.azure.com/jaraco/jsonpickle/_build/latest?definitionId=1&branchName=master

.. image:: https://readthedocs.org/projects/jsonpickle/badge/?version=latest
   :target: https://jsonpickle.readthedocs.io/en/latest/?badge=latest

.. image:: https://travis-ci.org/jsonpickle/jsonpickle.svg?branch=master
   :target: https://travis-ci.org/jsonpickle/jsonpickle
   :alt: travis

.. image:: https://img.shields.io/badge/License-BSD%203--Clause-blue.svg
   :target: https://github.com/jsonpickle/jsonpickle/blob/master/COPYING
   :alt: BSD


jsonpickle
==========
jsonpickle is a library for the two-way conversion of complex Python objects
and `JSON <http://json.org/>`_. jsonpickle builds upon the existing JSON
encoders, such as simplejson, json, and demjson.

For complete documentation, please visit the
`jsonpickle documentation <http://jsonpickle.readthedocs.io/>`_.

Bug reports and merge requests are encouraged at the
`jsonpickle repository on github <https://github.com/jsonpickle/jsonpickle>`_.

jsonpickle supports Python 2.7 and Python 3.4 or greater.

**WARNING**:
jsonpickle can execute arbitrary Python code. Do not load jsonpickles from untrusted / unauthenticated sources.

Why jsonpickle?
===============
Data serialized with python's pickle (or cPickle or dill) is not easily readable outside of python. Using the json format, jsonpickle allows simple data types to be stored in a human-readable format, and more complex data types such as numpy arrays and pandas dataframes, to be machine-readable on any platform that supports json. E.g., unlike pickled data, jsonpickled data stored in an Amazon S3 bucket is indexable by Amazon's Athena.

Install
=======

Install from pip for the latest stable release:

::

    pip install jsonpickle

Install from github for the latest changes:

::

    pip install git+https://github.com/jsonpickle/jsonpickle.git

If you have the files checked out for development:

::

    git clone https://github.com/jsonpickle/jsonpickle.git
    cd jsonpickle
    python setup.py develop


Numpy Support
=============
jsonpickle includes a built-in numpy extension. If you would like to encode
sklearn models, numpy arrays, and other numpy-based data then you must
enable the numpy extension by registering its handlers::

    >>> import jsonpickle.ext.numpy as jsonpickle_numpy
    >>> jsonpickle_numpy.register_handlers()

Pandas Support
==============
jsonpickle includes a built-in pandas extension. If you would like to encode
pandas DataFrame or Series objects then you must enable the pandas extension
by registering its handlers::

    >>> import jsonpickle.ext.pandas as jsonpickle_pandas
    >>> jsonpickle_pandas.register_handlers()

jsonpickleJS
============
`jsonpickleJS <https://github.com/cuthbertLab/jsonpickleJS>`_
is a javascript implementation of jsonpickle by Michael Scott Cuthbert.
jsonpickleJS can be extremely useful for projects that have parallel data
structures between Python and Javascript.

License
=======
Licensed under the BSD License. See COPYING for details.
See jsonpickleJS/LICENSE for details about the jsonpickleJS license.

Development
===========

Use `make` to run the unit tests::

    make test

`pytest` is used to run unit tests internally.

A `tox` target is provided to run tests using tox.
Setting ``multi=1`` tests using all installed and supported Python versions::

    make tox
    make tox multi=1

`jsonpickle` itself has no dependencies beyond the Python stdlib.
`tox` is required for testing when using the `tox` test runner only.

The testing requirements are specified in `requirements-dev.txt`.
It is recommended to create a virtualenv and run tests from within the
virtualenv, or use a tool such as `vx <https://github.com/davvid/vx/>`_
to activate the virtualenv without polluting the shell environment::

    python3 -mvenv env3x
    vx env3x pip install --requirement requirements-dev.txt
    vx env3x make test

`jsonpickle` supports multiple Python versions, so using a combination of
multiple virtualenvs and `tox` is useful in order to catch compatibility
issues when developing.
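To illustrate the human-readability claim above: the encoded form is ordinary JSON, parseable by the stdlib alone (a quick sketch, not part of the package)::

    import json, jsonpickle

    payload = jsonpickle.encode({'team': 'Arizona Aways', 'score': 3})
    print(json.loads(payload))   # plain JSON in, plain dict out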
31 matteo_env/Lib/site-packages/jsonpickle-1.4.2.dist-info/RECORD
Normal file
@@ -0,0 +1,31 @@
jsonpickle-1.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
jsonpickle-1.4.2.dist-info/LICENSE,sha256=x0J1XN545Tr9tDrZCzeskCyA81D41CGSSYCKl2WclqY,1493
jsonpickle-1.4.2.dist-info/METADATA,sha256=m7dcuYs8dSPINpiD6jAFt-AYqtHUNxuWQTifHFFGcP8,6588
jsonpickle-1.4.2.dist-info/RECORD,,
jsonpickle-1.4.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
jsonpickle-1.4.2.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
jsonpickle-1.4.2.dist-info/top_level.txt,sha256=cdRDYlLc4XOT2KzzPurieMn-XW_3YxFzopwdeDqPFVs,11
jsonpickle/__init__.py,sha256=1Az0ZGQbumVkaxKs0VC51yK0lwspN9N-v4menGQeRes,2585
jsonpickle/__pycache__/__init__.cpython-38.pyc,,
jsonpickle/__pycache__/backend.cpython-38.pyc,,
jsonpickle/__pycache__/compat.cpython-38.pyc,,
jsonpickle/__pycache__/handlers.cpython-38.pyc,,
jsonpickle/__pycache__/pickler.cpython-38.pyc,,
jsonpickle/__pycache__/tags.cpython-38.pyc,,
jsonpickle/__pycache__/unpickler.cpython-38.pyc,,
jsonpickle/__pycache__/util.cpython-38.pyc,,
jsonpickle/__pycache__/version.cpython-38.pyc,,
jsonpickle/backend.py,sha256=_OdeFpbnbqOkb6C6MIe95ohNHeDpLBIswVuNTHCmZJE,10312
jsonpickle/compat.py,sha256=5rkux_slQiyqI6dODJUK7VdDM_x5ksfJbgz4A4OS5_Q,1025
jsonpickle/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
jsonpickle/ext/__pycache__/__init__.cpython-38.pyc,,
jsonpickle/ext/__pycache__/numpy.cpython-38.pyc,,
jsonpickle/ext/__pycache__/pandas.cpython-38.pyc,,
jsonpickle/ext/numpy.py,sha256=w2YgZqR3FSE6u3hPz9ririDBkYIAtrInjeGlDWHJ3qw,12339
jsonpickle/ext/pandas.py,sha256=LHflgvnRvdUqp1QWO2lBgc7i9sOtsLk5_rvmfEFgljE,7003
jsonpickle/handlers.py,sha256=yvAI7unS9dcRPYvOws9YqUNiRA9BwbNjX2Vx-gB768c,8460
jsonpickle/pickler.py,sha256=v0ahAa62IoHyOqtOLSGSHQbZIXuqMGPdbP5RKddqyqw,26032
jsonpickle/tags.py,sha256=QSrEwfvGvqPJxy6KkTKskDKqstm76RulfX8uBrbfn70,986
jsonpickle/unpickler.py,sha256=yW7dVzyAHnz5ECnl-KKveZAizJLwraRC9h3lqU5SuMU,24807
jsonpickle/util.py,sha256=L4V6l8AXXgk9dsU4nz271wpcV7AjsUX6BbLHwHFeHF8,14089
jsonpickle/version.py,sha256=blvoKVdqIhtMALqv8aFBVj56EYIXUJJMAUkZTec9Ttg,448
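Each RECORD hash is an unpadded urlsafe-base64 SHA-256 digest (PEP 376 / the wheel spec). A sketch of how an entry could be checked::

    import base64, hashlib

    def record_hash(path):
        digest = hashlib.sha256(open(path, 'rb').read()).digest()
        return base64.urlsafe_b64encode(digest).rstrip(b'=').decode()

    # record_hash('.../jsonpickle-1.4.2.dist-info/top_level.txt') should yield
    # 'cdRDYlLc4XOT2KzzPurieMn-XW_3YxFzopwdeDqPFVs'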
6 matteo_env/Lib/site-packages/jsonpickle-1.4.2.dist-info/WHEEL
Normal file
@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.35.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
1 matteo_env/Lib/site-packages/jsonpickle-1.4.2.dist-info/top_level.txt
Normal file
@@ -0,0 +1 @@
jsonpickle
85 matteo_env/Lib/site-packages/jsonpickle/__init__.py
Normal file
@@ -0,0 +1,85 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009, 2011, 2013 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.

"""Python library for serializing any arbitrary object graph into JSON.

.. warning::

   jsonpickle can execute arbitrary Python code. Do not load jsonpickles from
   untrusted / unauthenticated sources.

jsonpickle can take almost any Python object and turn the object into JSON.
Additionally, it can reconstitute the object back into Python.

The object must be accessible globally via a module and must
inherit from object (AKA new-style classes).

Create an object::

    class Thing(object):
        def __init__(self, name):
            self.name = name

    obj = Thing('Awesome')

Use jsonpickle to transform the object into a JSON string::

    import jsonpickle
    frozen = jsonpickle.encode(obj)

Use jsonpickle to recreate a Python object from a JSON string::

    thawed = jsonpickle.decode(frozen)

The new object has the same type and data, but essentially is now a copy of
the original.

.. code-block:: python

    assert obj.name == thawed.name

If you will never need to load (regenerate the Python class from JSON), you can
pass in the keyword unpicklable=False to prevent extra information from being
added to JSON::

    oneway = jsonpickle.encode(obj, unpicklable=False)
    result = jsonpickle.decode(oneway)
    assert obj.name == result['name'] == 'Awesome'

"""
from __future__ import absolute_import, division, unicode_literals

from .backend import json
from .pickler import encode
from .unpickler import decode

# Export other names not in __all__
from .backend import JSONBackend  # noqa: F401
from .version import __version__  # noqa: F401
from .handlers import register  # noqa: F401
from .handlers import unregister  # noqa: F401
from .pickler import Pickler  # noqa: F401
from .unpickler import Unpickler  # noqa: F401

__all__ = ('encode', 'decode')

# register built-in handlers
__import__('jsonpickle.handlers', level=0)

# Export specific JSONPluginMgr methods into the jsonpickle namespace
set_preferred_backend = json.set_preferred_backend
set_decoder_options = json.set_decoder_options
set_encoder_options = json.set_encoder_options
load_backend = json.load_backend
remove_backend = json.remove_backend
enable_fallthrough = json.enable_fallthrough

# json.load(), loads(), dump(), dumps() compatibility
dumps = encode
loads = decode
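The dumps/loads aliases at the end of the module mean jsonpickle can be dropped in where json-style names are expected; a quick sketch::

    import jsonpickle

    class Thing(object):
        def __init__(self, name):
            self.name = name

    frozen = jsonpickle.dumps(Thing('Awesome'))  # alias for encode()
    thawed = jsonpickle.loads(frozen)            # alias for decode()
    assert thawed.name == 'Awesome'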
294 matteo_env/Lib/site-packages/jsonpickle/backend.py
Normal file
@@ -0,0 +1,294 @@
from __future__ import absolute_import, division, unicode_literals

from .compat import string_types
from .compat import PY3_ORDERED_DICT


class JSONBackend(object):
    """Manages encoding and decoding using various backends.

    It tries these modules in this order:
        simplejson, json, demjson

    simplejson is a fast and popular backend and is tried first.
    json comes with Python and is tried second.
    demjson is the most permissive backend and is tried last.

    """

    def __init__(self, fallthrough=True):
        # Whether we should fallthrough to the next backend
        self._fallthrough = fallthrough
        # The names of backends that have been successfully imported
        self._backend_names = []

        # A dictionary mapping backend names to encode/decode functions
        self._encoders = {}
        self._decoders = {}

        # Options to pass to specific encoders
        self._encoder_options = {}

        # Options to pass to specific decoders
        self._decoder_options = {}

        # The exception class that is thrown when a decoding error occurs
        self._decoder_exceptions = {}

        # Whether we've loaded any backends successfully
        self._verified = False

        self.load_backend('simplejson')
        self.load_backend('json')
        self.load_backend('demjson', 'encode', 'decode', 'JSONDecodeError')
        self.load_backend('jsonlib', 'write', 'read', 'ReadError')
        self.load_backend('yajl')
        self.load_backend('ujson')

        # Defaults for various encoders
        sort = not PY3_ORDERED_DICT
        json_opts = ((), {'sort_keys': sort})
        self._encoder_options = {
            'ujson': ((), {'sort_keys': sort, 'escape_forward_slashes': False}),
            'json': json_opts,
            'simplejson': json_opts,
            'django.util.simplejson': json_opts,
        }

    def _verify(self):
        """Ensures that we've loaded at least one JSON backend."""
        if self._verified:
            return
        raise AssertionError(
            'jsonpickle requires at least one of the '
            'following:\n'
            '    python2.6, simplejson, or demjson'
        )

    def enable_fallthrough(self, enable):
        """
        Disable jsonpickle's fallthrough-on-error behavior

        By default, jsonpickle tries the next backend when decoding or
        encoding using a backend fails.

        This can make it difficult to force jsonpickle to use a specific
        backend, and catch errors, because the error will be suppressed and
        may not be raised by the subsequent backend.

        Calling `enable_backend(False)` will make jsonpickle immediately
        re-raise any exceptions raised by the backends.

        """
        self._fallthrough = enable

    def load_backend(self, name, dumps='dumps', loads='loads', loads_exc=ValueError):

        """Load a JSON backend by name.

        This method loads a backend and sets up references to that
        backend's loads/dumps functions and exception classes.

        :param dumps: is the name of the backend's encode method.
            The method should take an object and return a string.
            Defaults to 'dumps'.
        :param loads: names the backend's method for the reverse
            operation -- returning a Python object from a string.
        :param loads_exc: can be either the name of the exception class
            used to denote decoding errors, or it can be a direct reference
            to the appropriate exception class itself. If it is a name,
            then the assumption is that an exception class of that name
            can be found in the backend module's namespace.
        :param load: names the backend's 'load' method.
        :param dump: names the backend's 'dump' method.
        :rtype bool: True on success, False if the backend could not be loaded.

        """
        try:
            # Load the JSON backend
            mod = __import__(name)
        except ImportError:
            return False

        # Handle submodules, e.g. django.utils.simplejson
        try:
            for attr in name.split('.')[1:]:
                mod = getattr(mod, attr)
        except AttributeError:
            return False

        if not self._store(self._encoders, name, mod, dumps) or not self._store(
            self._decoders, name, mod, loads
        ):
            return False

        if isinstance(loads_exc, string_types):
            # This backend's decoder exception is part of the backend
            if not self._store(self._decoder_exceptions, name, mod, loads_exc):
                return False
        else:
            # simplejson uses ValueError
            self._decoder_exceptions[name] = loads_exc

        # Setup the default args and kwargs for this encoder/decoder
        self._encoder_options.setdefault(name, ([], {}))
        self._decoder_options.setdefault(name, ([], {}))

        # Add this backend to the list of candidate backends
        self._backend_names.append(name)

        # Indicate that we successfully loaded a JSON backend
        self._verified = True
        return True

    def remove_backend(self, name):
        """Remove all entries for a particular backend."""
        self._encoders.pop(name, None)
        self._decoders.pop(name, None)
        self._decoder_exceptions.pop(name, None)
        self._decoder_options.pop(name, None)
        self._encoder_options.pop(name, None)
        if name in self._backend_names:
            self._backend_names.remove(name)
        self._verified = bool(self._backend_names)

    def encode(self, obj, indent=None, separators=None):
        """
        Attempt to encode an object into JSON.

        This tries the loaded backends in order and passes along the last
        exception if no backend is able to encode the object.

        """
        self._verify()

        if not self._fallthrough:
            name = self._backend_names[0]
            return self.backend_encode(name, obj, indent=indent, separators=separators)

        for idx, name in enumerate(self._backend_names):
            try:
                return self.backend_encode(
                    name, obj, indent=indent, separators=separators
                )
            except Exception as e:
                if idx == len(self._backend_names) - 1:
                    raise e

    # def dumps
    dumps = encode

    def backend_encode(self, name, obj, indent=None, separators=None):
        optargs, optkwargs = self._encoder_options.get(name, ([], {}))
        encoder_kwargs = optkwargs.copy()
        if indent is not None:
            encoder_kwargs['indent'] = indent
        if separators is not None:
            encoder_kwargs['separators'] = separators
        encoder_args = (obj,) + tuple(optargs)
        return self._encoders[name](*encoder_args, **encoder_kwargs)

    def decode(self, string):
        """
        Attempt to decode an object from a JSON string.

        This tries the loaded backends in order and passes along the last
        exception if no backends are able to decode the string.

        """
        self._verify()

        if not self._fallthrough:
            name = self._backend_names[0]
            return self.backend_decode(name, string)

        for idx, name in enumerate(self._backend_names):
            try:
                return self.backend_decode(name, string)
            except self._decoder_exceptions[name] as e:
                if idx == len(self._backend_names) - 1:
                    raise e
                else:
                    pass  # and try a more forgiving encoder, e.g. demjson

    # def loads
    loads = decode

    def backend_decode(self, name, string):
        optargs, optkwargs = self._decoder_options.get(name, ((), {}))
        decoder_kwargs = optkwargs.copy()
        return self._decoders[name](string, *optargs, **decoder_kwargs)

    def set_preferred_backend(self, name):
        """
        Set the preferred json backend.

        If a preferred backend is set then jsonpickle tries to use it
        before any other backend.

        For example::

            set_preferred_backend('simplejson')

        If the backend is not one of the built-in jsonpickle backends
        (json/simplejson, or demjson) then you must load the backend
        prior to calling set_preferred_backend.

        AssertionError is raised if the backend has not been loaded.

        """
        if name in self._backend_names:
            self._backend_names.remove(name)
            self._backend_names.insert(0, name)
        else:
            errmsg = 'The "%s" backend has not been loaded.' % name
            raise AssertionError(errmsg)

    def set_encoder_options(self, name, *args, **kwargs):
        """
        Associate encoder-specific options with an encoder.

        After calling set_encoder_options, any calls to jsonpickle's
        encode method will pass the supplied args and kwargs along to
        the appropriate backend's encode method.

        For example::

            set_encoder_options('simplejson', sort_keys=True, indent=4)
            set_encoder_options('demjson', compactly=False)

        See the appropriate encoder's documentation for details about
        the supported arguments and keyword arguments.

        """
        self._encoder_options[name] = (args, kwargs)

    def set_decoder_options(self, name, *args, **kwargs):
        """
        Associate decoder-specific options with a decoder.

        After calling set_decoder_options, any calls to jsonpickle's
        decode method will pass the supplied args and kwargs along to
        the appropriate backend's decode method.

        For example::

            set_decoder_options('simplejson', encoding='utf8', cls=JSONDecoder)
            set_decoder_options('demjson', strict=True)

        See the appropriate decoder's documentation for details about
        the supported arguments and keyword arguments.

        """
        self._decoder_options[name] = (args, kwargs)

    def _store(self, dct, backend, obj, name):
        try:
            dct[backend] = getattr(obj, name)
        except AttributeError:
            self.remove_backend(backend)
            return False
        return True


json = JSONBackend()
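The module-level `json = JSONBackend()` singleton is what the jsonpickle namespace re-exports, so backend selection and options are set through it; a sketch using the built-in json backend::

    import jsonpickle

    jsonpickle.set_preferred_backend('json')          # must already be loaded
    jsonpickle.set_encoder_options('json', indent=2)  # passed through to json.dumps
    print(jsonpickle.encode({'a': 1}))                # pretty-printed output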
39 matteo_env/Lib/site-packages/jsonpickle/compat.py
Normal file
@@ -0,0 +1,39 @@
from __future__ import absolute_import, division, unicode_literals
import sys
import types
import base64

PY_MAJOR = sys.version_info[0]
PY2 = PY_MAJOR == 2
PY3 = PY_MAJOR == 3
PY3_ORDERED_DICT = PY3 and sys.version_info[1] >= 6  # Python 3.6+

class_types = (type,)
iterator_types = (type(iter('')),)

if PY3:
    import builtins
    import queue
    from base64 import encodebytes, decodebytes
    from collections.abc import Iterator as abc_iterator

    string_types = (str,)
    numeric_types = (int, float)
    ustr = str
else:
    from collections import Iterator as abc_iterator  # noqa

    builtins = __import__('__builtin__')
    class_types += (types.ClassType,)
    encodebytes = base64.encodestring
    decodebytes = base64.decodestring
    string_types = (builtins.basestring,)
    numeric_types = (int, float, builtins.long)
    queue = __import__('Queue')
    ustr = builtins.unicode


def iterator(class_):
    if PY2 and hasattr(class_, '__next__'):
        class_.next = class_.__next__
    return class_
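A sketch of how the iterator() shim appears intended to be used (an assumption read from its definition: on Python 2 it aliases next to __next__ so one class body serves both majors)::

    @iterator
    class Count(object):
        def __init__(self):
            self.i = 0
        def __iter__(self):
            return self
        def __next__(self):  # aliased to next() on Python 2 by the shim
            self.i += 1
            return self.i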
337 matteo_env/Lib/site-packages/jsonpickle/ext/numpy.py
Normal file
@@ -0,0 +1,337 @@
from __future__ import absolute_import
import ast
import sys
import zlib
import warnings
import json

import numpy as np

from ..handlers import BaseHandler, register, unregister
from ..compat import numeric_types
from ..util import b64decode, b64encode
from .. import compat


__all__ = ['register_handlers', 'unregister_handlers']

native_byteorder = '<' if sys.byteorder == 'little' else '>'


def get_byteorder(arr):
    """translate equals sign to native order"""
    byteorder = arr.dtype.byteorder
    return native_byteorder if byteorder == '=' else byteorder


class NumpyBaseHandler(BaseHandler):
    def flatten_dtype(self, dtype, data):
        if hasattr(dtype, 'tostring'):
            data['dtype'] = dtype.tostring()
        else:
            dtype = compat.ustr(dtype)
            prefix = '(numpy.record, '
            if dtype.startswith(prefix):
                dtype = dtype[len(prefix) : -1]
            data['dtype'] = dtype

    def restore_dtype(self, data):
        dtype = data['dtype']
        if dtype.startswith(('{', '[')):
            dtype = ast.literal_eval(dtype)
        return np.dtype(dtype)


class NumpyDTypeHandler(NumpyBaseHandler):
    def flatten(self, obj, data):
        self.flatten_dtype(obj, data)
        return data

    def restore(self, data):
        return self.restore_dtype(data)


class NumpyGenericHandler(NumpyBaseHandler):
    def flatten(self, obj, data):
        self.flatten_dtype(obj.dtype.newbyteorder('N'), data)
        data['value'] = self.context.flatten(obj.tolist(), reset=False)
        return data

    def restore(self, data):
        value = self.context.restore(data['value'], reset=False)
        return self.restore_dtype(data).type(value)


class NumpyNDArrayHandler(NumpyBaseHandler):
    """Stores arrays as text representation, without regard for views"""

    def flatten_flags(self, obj, data):
        if obj.flags.writeable is False:
            data['writeable'] = False

    def restore_flags(self, data, arr):
        if not data.get('writeable', True):
            arr.flags.writeable = False

    def flatten(self, obj, data):
        self.flatten_dtype(obj.dtype.newbyteorder('N'), data)
        self.flatten_flags(obj, data)
        data['values'] = self.context.flatten(obj.tolist(), reset=False)
        if 0 in obj.shape:
            # add shape information explicitly as it cannot be
            # inferred from an empty list
            data['shape'] = obj.shape
        return data

    def restore(self, data):
        values = self.context.restore(data['values'], reset=False)
        arr = np.array(
            values, dtype=self.restore_dtype(data), order=data.get('order', 'C')
        )
        shape = data.get('shape', None)
        if shape is not None:
            arr = arr.reshape(shape)

        self.restore_flags(data, arr)
        return arr


class NumpyNDArrayHandlerBinary(NumpyNDArrayHandler):
    """stores arrays with size greater than 'size_threshold' as
    (optionally) compressed base64

    Notes
    -----
    This would be easier to implement using np.save/np.load, but
    that would be less language-agnostic
    """

    def __init__(self, size_threshold=16, compression=zlib):
        """
        :param size_threshold: nonnegative int or None
            valid values for 'size_threshold' are all nonnegative
            integers and None
            if size_threshold is None, values are always stored as nested lists
        :param compression: a compression module or None
            valid values for 'compression' are {zlib, bz2, None}
            if compression is None, no compression is applied
        """
        self.size_threshold = size_threshold
        self.compression = compression

    def flatten_byteorder(self, obj, data):
        byteorder = obj.dtype.byteorder
        if byteorder != '|':
            data['byteorder'] = get_byteorder(obj)

    def restore_byteorder(self, data, arr):
        byteorder = data.get('byteorder', None)
        if byteorder:
            arr.dtype = arr.dtype.newbyteorder(byteorder)

    def flatten(self, obj, data):
        """encode numpy to json"""
        if self.size_threshold is None or self.size_threshold >= obj.size:
            # encode as text
            data = super(NumpyNDArrayHandlerBinary, self).flatten(obj, data)
        else:
            # encode as binary
            if obj.dtype == np.object:
                # There's a bug deep in the bowels of numpy that causes a
                # segfault when round-tripping an ndarray of dtype object.
                # E.g., the following will result in a segfault:
                #     import numpy as np
                #     arr = np.array([str(i) for i in range(3)],
                #                    dtype=np.object)
                #     dtype = arr.dtype
                #     shape = arr.shape
                #     buf = arr.tobytes()
                #     del arr
                #     arr = np.ndarray(buffer=buf, dtype=dtype,
                #                      shape=shape).copy()
                # So, save as a binary-encoded list in this case
                buf = json.dumps(obj.tolist()).encode()
            elif hasattr(obj, 'tobytes'):
                # numpy docstring is lacking as of 1.11.2,
                # but this is the option we need
                buf = obj.tobytes(order='a')
            else:
                # numpy < 1.9 compatibility
                buf = obj.tostring(order='a')
            if self.compression:
                buf = self.compression.compress(buf)
            data['values'] = b64encode(buf)
            data['shape'] = obj.shape
            self.flatten_dtype(obj.dtype.newbyteorder('N'), data)
            self.flatten_byteorder(obj, data)
            self.flatten_flags(obj, data)

            if not obj.flags.c_contiguous:
                data['order'] = 'F'

        return data

    def restore(self, data):
        """decode numpy from json"""
        values = data['values']
        if isinstance(values, list):
            # decode text representation
            arr = super(NumpyNDArrayHandlerBinary, self).restore(data)
        elif isinstance(values, numeric_types):
            # single-value array
            arr = np.array([values], dtype=self.restore_dtype(data))
        else:
            # decode binary representation
            dtype = self.restore_dtype(data)
            buf = b64decode(values)
            if self.compression:
                buf = self.compression.decompress(buf)
            # See note above about segfault bug for numpy dtype object. Those
            # are saved as a list to work around that.
            if dtype == np.object:
                values = json.loads(buf.decode())
                arr = np.array(values, dtype=dtype, order=data.get('order', 'C'))
                shape = data.get('shape', None)
                if shape is not None:
                    arr = arr.reshape(shape)
            else:
                arr = np.ndarray(
                    buffer=buf,
                    dtype=dtype,
                    shape=data.get('shape'),
                    order=data.get('order', 'C'),
                ).copy()  # make a copy, to force the result to own the data
                self.restore_byteorder(data, arr)
            self.restore_flags(data, arr)

        return arr


class NumpyNDArrayHandlerView(NumpyNDArrayHandlerBinary):
    """Pickles references inside ndarrays, or array-views

    Notes
    -----
    The current implementation has some restrictions.

    'base' arrays, or arrays which are viewed by other arrays,
    must be f-or-c-contiguous.
    This is not such a large restriction in practice, because all
    numpy array creation is c-contiguous by default.
    Relaxing this restriction would be nice though; especially if
    it can be done without bloating the design too much.

    Furthermore, ndarrays which are views of array-like objects
    implementing __array_interface__,
    but which are not themselves nd-arrays, are deepcopied with
    a warning (by default),
    as we cannot guarantee whatever custom logic such classes
    implement is correctly reproduced.
    """

    def __init__(self, mode='warn', size_threshold=16, compression=zlib):
        """
        :param mode: {'warn', 'raise', 'ignore'}
            How to react when encountering array-like objects whose
            references we cannot safely serialize
        :param size_threshold: nonnegative int or None
            valid values for 'size_threshold' are all nonnegative
            integers and None
            if size_threshold is None, values are always stored as nested lists
        :param compression: a compression module or None
            valid values for 'compression' are {zlib, bz2, None}
            if compression is None, no compression is applied
        """
        super(NumpyNDArrayHandlerView, self).__init__(size_threshold, compression)
        self.mode = mode

    def flatten(self, obj, data):
        """encode numpy to json"""
        base = obj.base
        if base is None and obj.flags.forc:
            # store by value
            data = super(NumpyNDArrayHandlerView, self).flatten(obj, data)
            # ensure that views on arrays stored as text
            # are interpreted correctly
            if not obj.flags.c_contiguous:
                data['order'] = 'F'
        elif isinstance(base, np.ndarray) and base.flags.forc:
            # store by reference
            data['base'] = self.context.flatten(base, reset=False)

            offset = obj.ctypes.data - base.ctypes.data
            if offset:
                data['offset'] = offset

            if not obj.flags.c_contiguous:
                data['strides'] = obj.strides

            data['shape'] = obj.shape
            self.flatten_dtype(obj.dtype.newbyteorder('N'), data)
            self.flatten_flags(obj, data)

            if get_byteorder(obj) != '|':
                byteorder = 'S' if get_byteorder(obj) != get_byteorder(base) else None
                if byteorder:
                    data['byteorder'] = byteorder

            if self.size_threshold is None or self.size_threshold >= obj.size:
                # not used in restore since base is present, but
                # include values for human-readability
                super(NumpyNDArrayHandlerBinary, self).flatten(obj, data)
        else:
            # store a deepcopy or fail
            if self.mode == 'warn':
                msg = (
                    "ndarray is defined by reference to an object "
                    "we do not know how to serialize. "
                    "A deep copy is serialized instead, breaking "
                    "memory aliasing."
                )
                warnings.warn(msg)
            elif self.mode == 'raise':
                msg = (
                    "ndarray is defined by reference to an object we do "
                    "not know how to serialize."
                )
                raise ValueError(msg)
            data = super(NumpyNDArrayHandlerView, self).flatten(obj.copy(), data)

        return data

    def restore(self, data):
        """decode numpy from json"""
        base = data.get('base', None)
        if base is None:
            # decode array with owndata=True
            arr = super(NumpyNDArrayHandlerView, self).restore(data)
        else:
            # decode array view, which references the data of another array
            base = self.context.restore(base, reset=False)
            assert (
                base.flags.forc
            ), "Current implementation assumes base is C or F contiguous"

            arr = np.ndarray(
                buffer=base.data,
                dtype=self.restore_dtype(data).newbyteorder(data.get('byteorder', '|')),
                shape=data.get('shape'),
                offset=data.get('offset', 0),
                strides=data.get('strides', None),
            )

            self.restore_flags(data, arr)

        return arr


def register_handlers():
    register(np.dtype, NumpyDTypeHandler, base=True)
    register(np.generic, NumpyGenericHandler, base=True)
    register(np.ndarray, NumpyNDArrayHandlerView(), base=True)


def unregister_handlers():
    unregister(np.dtype)
    unregister(np.generic)
    unregister(np.ndarray)
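register_handlers() wires these classes into jsonpickle, after which ndarrays round-trip; a short sketch::

    import numpy as np
    import jsonpickle
    import jsonpickle.ext.numpy as jsonpickle_numpy

    jsonpickle_numpy.register_handlers()
    arr = np.arange(6, dtype=np.int64).reshape(2, 3)
    thawed = jsonpickle.decode(jsonpickle.encode(arr))
    assert (thawed == arr).all() and thawed.dtype == arr.dtype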
228 matteo_env/Lib/site-packages/jsonpickle/ext/pandas.py
Normal file
@@ -0,0 +1,228 @@
from __future__ import absolute_import

import pandas as pd
from io import StringIO
import zlib

from .. import encode, decode
from ..handlers import BaseHandler, register, unregister
from ..util import b64decode, b64encode
from .numpy import register_handlers as register_numpy_handlers
from .numpy import unregister_handlers as unregister_numpy_handlers

__all__ = ['register_handlers', 'unregister_handlers']


class PandasProcessor(object):
    def __init__(self, size_threshold=500, compression=zlib):
        """
        :param size_threshold: nonnegative int or None
            valid values for 'size_threshold' are all nonnegative
            integers and None. If size_threshold is None,
            dataframes are always stored as csv strings
        :param compression: a compression module or None
            valid values for 'compression' are {zlib, bz2, None}
            if compression is None, no compression is applied
        """
        self.size_threshold = size_threshold
        self.compression = compression

    def flatten_pandas(self, buf, data, meta=None):
        if self.size_threshold is not None and len(buf) > self.size_threshold:
            if self.compression:
                buf = self.compression.compress(buf.encode())
                data['comp'] = True
            data['values'] = b64encode(buf)
            data['txt'] = False
        else:
            data['values'] = buf
            data['txt'] = True

        data['meta'] = meta
        return data

    def restore_pandas(self, data):
        if data.get('txt', True):
            # It's just text...
            buf = data['values']
        else:
            buf = b64decode(data['values'])
            if data.get('comp', False):
                buf = self.compression.decompress(buf).decode()
        meta = data.get('meta', {})
        return (buf, meta)


def make_read_csv_params(meta):
    meta_dtypes = meta.get('dtypes', {})

    parse_dates = []
    converters = {}
    dtype = {}
    for k, v in meta_dtypes.items():
        if v.startswith('datetime'):
            parse_dates.append(k)
        elif v.startswith('complex'):
            converters[k] = complex
        else:
            dtype[k] = v

    return dict(dtype=dtype, parse_dates=parse_dates, converters=converters)


class PandasDfHandler(BaseHandler):
    pp = PandasProcessor()

    def flatten(self, obj, data):
        dtype = obj.dtypes.to_dict()

        meta = {'dtypes': {k: str(dtype[k]) for k in dtype}, 'index': encode(obj.index)}

        data = self.pp.flatten_pandas(
            obj.reset_index(drop=True).to_csv(index=False), data, meta
        )
        return data

    def restore(self, data):
        csv, meta = self.pp.restore_pandas(data)
        params = make_read_csv_params(meta)
        df = (
            pd.read_csv(StringIO(csv), **params)
            if data['values'].strip()
            else pd.DataFrame()
        )
        df.set_index(decode(meta['index']), inplace=True)
        return df


class PandasSeriesHandler(BaseHandler):
    pp = PandasProcessor()

    def flatten(self, obj, data):
        """Flatten the index and values for reconstruction"""
        data['name'] = obj.name
        # This relies on the numpy handlers for the inner guts.
        data['index'] = self.context.flatten(obj.index, reset=False)
        data['values'] = self.context.flatten(obj.values, reset=False)
        return data

    def restore(self, data):
        """Restore the flattened data"""
        name = data['name']
        index = self.context.restore(data['index'], reset=False)
        values = self.context.restore(data['values'], reset=False)
        return pd.Series(values, index=index, name=name)


class PandasIndexHandler(BaseHandler):

    pp = PandasProcessor()
    index_constructor = pd.Index

    def name_bundler(self, obj):
        return {'name': obj.name}

    def flatten(self, obj, data):
        name_bundle = self.name_bundler(obj)
        meta = dict(dtype=str(obj.dtype), **name_bundle)
        buf = encode(obj.tolist())
        data = self.pp.flatten_pandas(buf, data, meta)
        return data

    def restore(self, data):
        buf, meta = self.pp.restore_pandas(data)
        dtype = meta.get('dtype', None)
        name_bundle = {k: v for k, v in meta.items() if k in {'name', 'names'}}
        idx = self.index_constructor(decode(buf), dtype=dtype, **name_bundle)
        return idx


class PandasPeriodIndexHandler(PandasIndexHandler):
    index_constructor = pd.PeriodIndex


class PandasMultiIndexHandler(PandasIndexHandler):
    def name_bundler(self, obj):
        return {'names': obj.names}


class PandasTimestampHandler(BaseHandler):
    pp = PandasProcessor()

    def flatten(self, obj, data):
        meta = {'isoformat': obj.isoformat()}
        buf = ''
        data = self.pp.flatten_pandas(buf, data, meta)
        return data

    def restore(self, data):
        _, meta = self.pp.restore_pandas(data)
        isoformat = meta['isoformat']
        obj = pd.Timestamp(isoformat)
        return obj


class PandasPeriodHandler(BaseHandler):
    pp = PandasProcessor()

    def flatten(self, obj, data):
        meta = {
            'start_time': encode(obj.start_time),
            'freqstr': obj.freqstr,
        }
        buf = ''
        data = self.pp.flatten_pandas(buf, data, meta)
        return data

    def restore(self, data):
        _, meta = self.pp.restore_pandas(data)
        start_time = decode(meta['start_time'])
        freqstr = meta['freqstr']
        obj = pd.Period(start_time, freqstr)
        return obj


class PandasIntervalHandler(BaseHandler):
    pp = PandasProcessor()

    def flatten(self, obj, data):
        meta = {
            'left': encode(obj.left),
            'right': encode(obj.right),
            'closed': obj.closed,
        }
        buf = ''
        data = self.pp.flatten_pandas(buf, data, meta)
        return data

    def restore(self, data):
        _, meta = self.pp.restore_pandas(data)
        left = decode(meta['left'])
        right = decode(meta['right'])
        closed = str(meta['closed'])
        obj = pd.Interval(left, right, closed=closed)
        return obj


def register_handlers():
    register_numpy_handlers()
    register(pd.DataFrame, PandasDfHandler, base=True)
    register(pd.Series, PandasSeriesHandler, base=True)
    register(pd.Index, PandasIndexHandler, base=True)
    register(pd.PeriodIndex, PandasPeriodIndexHandler, base=True)
    register(pd.MultiIndex, PandasMultiIndexHandler, base=True)
    register(pd.Timestamp, PandasTimestampHandler, base=True)
    register(pd.Period, PandasPeriodHandler, base=True)
    register(pd.Interval, PandasIntervalHandler, base=True)


def unregister_handlers():
    unregister_numpy_handlers()
    unregister(pd.DataFrame)
    unregister(pd.Series)
    unregister(pd.Index)
    unregister(pd.PeriodIndex)
    unregister(pd.MultiIndex)
    unregister(pd.Timestamp)
    unregister(pd.Period)
    unregister(pd.Interval)
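Likewise for the pandas handlers; a short round-trip sketch (DataFrames travel as csv text plus dtype/index metadata, so a simple frame should come back equal)::

    import pandas as pd
    import jsonpickle
    import jsonpickle.ext.pandas as jsonpickle_pandas

    jsonpickle_pandas.register_handlers()
    df = pd.DataFrame({'a': [1, 2], 'b': [3.0, 4.0]})
    roundtrip = jsonpickle.decode(jsonpickle.encode(df))
    assert roundtrip.equals(df)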
294 matteo_env/Lib/site-packages/jsonpickle/handlers.py
Normal file
@@ -0,0 +1,294 @@
"""
Custom handlers may be created to handle other objects. Each custom handler
must derive from :class:`jsonpickle.handlers.BaseHandler` and
implement ``flatten`` and ``restore``.

A handler can be bound to other types by calling
:func:`jsonpickle.handlers.register`.

"""
from __future__ import absolute_import, division, unicode_literals
import array
import copy
import datetime
import io
import re
import sys
import threading
import uuid

from . import compat
from . import util


class Registry(object):
    def __init__(self):
        self._handlers = {}
        self._base_handlers = {}

    def get(self, cls_or_name, default=None):
        """
        :param cls_or_name: the type or its fully qualified name
        :param default: default value, if a matching handler is not found

        Looks up a handler by type reference or its fully
        qualified name. If a direct match
        is not found, the search is performed over all
        handlers registered with base=True.
        """
        handler = self._handlers.get(cls_or_name)
        # attempt to find a base class
        if handler is None and util.is_type(cls_or_name):
            for cls, base_handler in self._base_handlers.items():
                if issubclass(cls_or_name, cls):
                    return base_handler
        return default if handler is None else handler

    def register(self, cls, handler=None, base=False):
        """Register a custom handler for a class

        :param cls: The custom object class to handle
        :param handler: The custom handler class (if
            None, a decorator wrapper is returned)
        :param base: Indicates whether the handler should
            be registered for all subclasses

        This function can be also used as a decorator
        by omitting the `handler` argument::

            @jsonpickle.handlers.register(Foo, base=True)
            class FooHandler(jsonpickle.handlers.BaseHandler):
                pass

        """
        if handler is None:

            def _register(handler_cls):
                self.register(cls, handler=handler_cls, base=base)
                return handler_cls

            return _register
        if not util.is_type(cls):
            raise TypeError('{!r} is not a class/type'.format(cls))
        # store both the name and the actual type for the ugly cases like
        # _sre.SRE_Pattern that cannot be loaded back directly
        self._handlers[util.importable_name(cls)] = self._handlers[cls] = handler
        if base:
            # only store the actual type for subclass checking
            self._base_handlers[cls] = handler

    def unregister(self, cls):
        self._handlers.pop(cls, None)
        self._handlers.pop(util.importable_name(cls), None)
        self._base_handlers.pop(cls, None)


registry = Registry()
register = registry.register
unregister = registry.unregister
get = registry.get


class BaseHandler(object):
    def __init__(self, context):
        """
        Initialize a new handler to handle a registered type.

        :Parameters:
          - `context`: reference to pickler/unpickler

        """
        self.context = context

    def __call__(self, context):
        """This permits registering either Handler instances or classes

        :Parameters:
          - `context`: reference to pickler/unpickler
        """
        self.context = context
        return self

    def flatten(self, obj, data):
        """
        Flatten `obj` into a json-friendly form and write result to `data`.

        :param object obj: The object to be serialized.
        :param dict data: A partially filled dictionary which will contain the
            json-friendly representation of `obj` once this method has
            finished.
        """
        raise NotImplementedError('You must implement flatten() in %s' % self.__class__)

    def restore(self, obj):
        """
        Restore an object of the registered type from the json-friendly
        representation `obj` and return it.
        """
        raise NotImplementedError('You must implement restore() in %s' % self.__class__)

    @classmethod
    def handles(self, cls):
        """
        Register this handler for the given class. Suitable as a decorator,
        e.g.::

            @MyCustomHandler.handles
            class MyCustomClass:
                def __reduce__(self):
                    ...
        """
        registry.register(cls, self)
        return cls


class ArrayHandler(BaseHandler):
    """Flatten and restore array.array objects"""

    def flatten(self, obj, data):
        data['typecode'] = obj.typecode
        data['values'] = self.context.flatten(obj.tolist(), reset=False)
|
||||
return data
|
||||
|
||||
def restore(self, data):
|
||||
typecode = data['typecode']
|
||||
values = self.context.restore(data['values'], reset=False)
|
||||
if typecode == 'c':
|
||||
values = [bytes(x) for x in values]
|
||||
return array.array(typecode, values)
|
||||
|
||||
|
||||
ArrayHandler.handles(array.array)
|
||||
|
||||
|
||||
class DatetimeHandler(BaseHandler):
|
||||
|
||||
"""Custom handler for datetime objects
|
||||
|
||||
Datetime objects use __reduce__, and they generate binary strings encoding
|
||||
the payload. This handler encodes that payload to reconstruct the
|
||||
object.
|
||||
|
||||
"""
|
||||
|
||||
def flatten(self, obj, data):
|
||||
pickler = self.context
|
||||
if not pickler.unpicklable:
|
||||
if hasattr(obj, 'isoformat'):
|
||||
result = obj.isoformat()
|
||||
else:
|
||||
result = compat.ustr(obj)
|
||||
return result
|
||||
cls, args = obj.__reduce__()
|
||||
flatten = pickler.flatten
|
||||
payload = util.b64encode(args[0])
|
||||
args = [payload] + [flatten(i, reset=False) for i in args[1:]]
|
||||
data['__reduce__'] = (flatten(cls, reset=False), args)
|
||||
return data
|
||||
|
||||
def restore(self, data):
|
||||
cls, args = data['__reduce__']
|
||||
unpickler = self.context
|
||||
restore = unpickler.restore
|
||||
cls = restore(cls, reset=False)
|
||||
value = util.b64decode(args[0])
|
||||
params = (value,) + tuple([restore(i, reset=False) for i in args[1:]])
|
||||
return cls.__new__(cls, *params)
|
||||
|
||||
|
||||
DatetimeHandler.handles(datetime.datetime)
|
||||
DatetimeHandler.handles(datetime.date)
|
||||
DatetimeHandler.handles(datetime.time)
|
||||
|
||||
|
||||
class RegexHandler(BaseHandler):
|
||||
"""Flatten _sre.SRE_Pattern (compiled regex) objects"""
|
||||
|
||||
def flatten(self, obj, data):
|
||||
data['pattern'] = obj.pattern
|
||||
return data
|
||||
|
||||
def restore(self, data):
|
||||
return re.compile(data['pattern'])
|
||||
|
||||
|
||||
RegexHandler.handles(type(re.compile('')))
|
||||
|
||||
|
||||
class QueueHandler(BaseHandler):
|
||||
"""Opaquely serializes Queue objects
|
||||
|
||||
Queues contains mutex and condition variables which cannot be serialized.
|
||||
Construct a new Queue instance when restoring.
|
||||
|
||||
"""
|
||||
|
||||
def flatten(self, obj, data):
|
||||
return data
|
||||
|
||||
def restore(self, data):
|
||||
return compat.queue.Queue()
|
||||
|
||||
|
||||
QueueHandler.handles(compat.queue.Queue)
|
||||
|
||||
|
||||
class CloneFactory(object):
|
||||
"""Serialization proxy for collections.defaultdict's default_factory"""
|
||||
|
||||
def __init__(self, exemplar):
|
||||
self.exemplar = exemplar
|
||||
|
||||
def __call__(self, clone=copy.copy):
|
||||
"""Create new instances by making copies of the provided exemplar"""
|
||||
return clone(self.exemplar)
|
||||
|
||||
def __repr__(self):
|
||||
return '<CloneFactory object at 0x{:x} ({})>'.format(id(self), self.exemplar)
|
||||
|
||||
|
||||
class UUIDHandler(BaseHandler):
|
||||
"""Serialize uuid.UUID objects"""
|
||||
|
||||
def flatten(self, obj, data):
|
||||
data['hex'] = obj.hex
|
||||
return data
|
||||
|
||||
def restore(self, data):
|
||||
return uuid.UUID(data['hex'])
|
||||
|
||||
|
||||
UUIDHandler.handles(uuid.UUID)
|
||||
|
||||
|
||||
class LockHandler(BaseHandler):
|
||||
"""Serialize threading.Lock objects"""
|
||||
|
||||
def flatten(self, obj, data):
|
||||
data['locked'] = obj.locked()
|
||||
return data
|
||||
|
||||
def restore(self, data):
|
||||
lock = threading.Lock()
|
||||
if data.get('locked', False):
|
||||
lock.acquire()
|
||||
return lock
|
||||
|
||||
|
||||
_lock = threading.Lock()
|
||||
LockHandler.handles(_lock.__class__)
|
||||
|
||||
|
||||
class TextIOHandler(BaseHandler):
|
||||
"""Serialize file descriptors as None because we cannot roundtrip"""
|
||||
|
||||
def flatten(self, obj, data):
|
||||
return None
|
||||
|
||||
def restore(self, data):
|
||||
"""Restore should never get called because flatten() returns None"""
|
||||
raise AssertionError('Restoring IO.TextIOHandler is not supported')
|
||||
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
TextIOHandler.handles(io.TextIOWrapper)
|
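The Registry and BaseHandler machinery above is the library's public extension point. A small sketch of a custom handler for a hypothetical Point class (the Point and PointHandler names are illustrative, not part of the library), following the flatten()/restore() contract and the decorator form of register() documented above:

    import jsonpickle
    import jsonpickle.handlers

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

    @jsonpickle.handlers.register(Point, base=True)
    class PointHandler(jsonpickle.handlers.BaseHandler):
        def flatten(self, obj, data):
            data['xy'] = [obj.x, obj.y]  # write into the partially filled dict
            return data

        def restore(self, data):
            return Point(*data['xy'])

    p = jsonpickle.decode(jsonpickle.encode(Point(1, 2)))
    assert (p.x, p.y) == (1, 2)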
743 matteo_env/Lib/site-packages/jsonpickle/pickler.py (Normal file)
@@ -0,0 +1,743 @@
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009-2018 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
from __future__ import absolute_import, division, unicode_literals
import decimal
import warnings
import sys
import types
from itertools import chain, islice

from . import compat
from . import util
from . import tags
from . import handlers
from .backend import json
from .compat import numeric_types, string_types, PY3, PY2


def encode(
    value,
    unpicklable=True,
    make_refs=True,
    keys=False,
    max_depth=None,
    reset=True,
    backend=None,
    warn=False,
    context=None,
    max_iter=None,
    use_decimal=False,
    numeric_keys=False,
    use_base85=False,
    fail_safe=None,
    indent=None,
    separators=None,
):
    """Return a JSON formatted representation of value, a Python object.

    :param unpicklable: If set to False then the output will not contain the
        information necessary to turn the JSON data back into Python objects,
        but a simpler JSON stream is produced.
    :param max_depth: If set to a non-negative integer then jsonpickle will
        not recurse deeper than 'max_depth' steps into the object. Anything
        deeper than 'max_depth' is represented using a Python repr() of the
        object.
    :param make_refs: If set to False jsonpickle's referencing support is
        disabled. Objects that are id()-identical won't be preserved across
        encode()/decode(), but the resulting JSON stream will be conceptually
        simpler. jsonpickle detects cyclical objects and will break the cycle
        by calling repr() instead of recursing when make_refs is set False.
    :param keys: If set to True then jsonpickle will encode non-string
        dictionary keys instead of coercing them into strings via `repr()`.
        This is typically what you want if you need to support integer or
        object dictionary keys.
    :param numeric_keys: Only use this option if the backend supports integer
        dict keys natively. This flag tells jsonpickle to leave numeric keys
        as-is rather than conforming them to json-friendly strings.
        Using ``keys=True`` is the typical solution for integer keys, so only
        use this if you have a specific use case where you want to allow the
        backend to handle serialization of numeric dict keys.
    :param warn: If set to True then jsonpickle will warn when it
        returns None for an object which it cannot pickle
        (e.g. file descriptors).
    :param max_iter: If set to a non-negative integer then jsonpickle will
        consume at most `max_iter` items when pickling iterators.
    :param use_decimal: If set to True jsonpickle will allow Decimal
        instances to pass-through, with the assumption that the simplejson
        backend will be used in `use_decimal` mode. In order to use this mode
        you will need to configure simplejson::

            jsonpickle.set_encoder_options('simplejson',
                                           use_decimal=True, sort_keys=True)
            jsonpickle.set_decoder_options('simplejson',
                                           use_decimal=True)
            jsonpickle.set_preferred_backend('simplejson')

        NOTE: A side-effect of the above settings is that float values will be
        converted to Decimal when converting to json.
    :param use_base85:
        If possible, use base85 to encode binary data. Base85 bloats binary data
        by 1/4 as opposed to base64, which expands it by 1/3. This argument is
        ignored on Python 2 because base85 is not supported there.
    :param fail_safe: If set to a function, exceptions are ignored when pickling;
        if an exception happens the function is called and its return value
        is used as the value for the object that caused the error.
    :param indent: When `indent` is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that indent
        level. An indent level of 0 will only insert newlines. ``None`` is
        the most compact representation. Since the default item separator is
        ``(', ', ': ')``, the output might include trailing whitespace when
        ``indent`` is specified. You can use ``separators=(',', ': ')`` to
        avoid this. This value is passed directly to the active JSON backend
        library and not used by jsonpickle directly.
    :param separators:
        If ``separators`` is an ``(item_separator, dict_separator)`` tuple
        then it will be used instead of the default ``(', ', ': ')``
        separators. ``(',', ':')`` is the most compact JSON representation.
        This value is passed directly to the active JSON backend library and
        not used by jsonpickle directly.

    >>> encode('my string') == '"my string"'
    True
    >>> encode(36) == '36'
    True
    >>> encode({'foo': True}) == '{"foo": true}'
    True
    >>> encode({'foo': [1, 2, [3, 4]]}, max_depth=1)
    '{"foo": "[1, 2, [3, 4]]"}'

    """
    backend = backend or json
    context = context or Pickler(
        unpicklable=unpicklable,
        make_refs=make_refs,
        keys=keys,
        backend=backend,
        max_depth=max_depth,
        warn=warn,
        max_iter=max_iter,
        numeric_keys=numeric_keys,
        use_decimal=use_decimal,
        use_base85=use_base85,
        fail_safe=fail_safe,
    )
    return backend.encode(
        context.flatten(value, reset=reset), indent=indent, separators=separators
    )


class Pickler(object):
    def __init__(
        self,
        unpicklable=True,
        make_refs=True,
        max_depth=None,
        backend=None,
        keys=False,
        warn=False,
        max_iter=None,
        numeric_keys=False,
        use_decimal=False,
        use_base85=False,
        fail_safe=None,
    ):
        self.unpicklable = unpicklable
        self.make_refs = make_refs
        self.backend = backend or json
        self.keys = keys
        self.warn = warn
        self.numeric_keys = numeric_keys
        self.use_base85 = use_base85 and (not PY2)
        # The current recursion depth
        self._depth = -1
        # The maximal recursion depth
        self._max_depth = max_depth
        # Maps id(obj) to reference IDs
        self._objs = {}
        # Avoids garbage collection
        self._seen = []
        # maximum amount of items to take from a pickled iterator
        self._max_iter = max_iter
        # Whether to allow decimals to pass-through
        self._use_decimal = use_decimal

        if self.use_base85:
            self._bytes_tag = tags.B85
            self._bytes_encoder = util.b85encode
        else:
            self._bytes_tag = tags.B64
            self._bytes_encoder = util.b64encode

        # ignore exceptions
        self.fail_safe = fail_safe

    def reset(self):
        self._objs = {}
        self._depth = -1
        self._seen = []

    def _push(self):
        """Steps down one level in the namespace."""
        self._depth += 1

    def _pop(self, value):
        """Step up one level in the namespace and return the value.
        If we're at the root, reset the pickler's state.
        """
        self._depth -= 1
        if self._depth == -1:
            self.reset()
        return value

    def _log_ref(self, obj):
        """
        Log a reference to an in-memory object.
        Return True if this object is new and was assigned
        a new ID. Otherwise return False.
        """
        objid = id(obj)
        is_new = objid not in self._objs
        if is_new:
            new_id = len(self._objs)
            self._objs[objid] = new_id
        return is_new

    def _mkref(self, obj):
        """
        Log a reference to an in-memory object, and return
        if that object should be considered newly logged.
        """
        is_new = self._log_ref(obj)
        # Pretend the object is new
        pretend_new = not self.unpicklable or not self.make_refs
        return pretend_new or is_new

    def _getref(self, obj):
        return {tags.ID: self._objs.get(id(obj))}

    def flatten(self, obj, reset=True):
        """Takes an object and returns a JSON-safe representation of it.

        Simply returns any of the basic builtin datatypes

        >>> p = Pickler()
        >>> p.flatten('hello world') == 'hello world'
        True
        >>> p.flatten(49)
        49
        >>> p.flatten(350.0)
        350.0
        >>> p.flatten(True)
        True
        >>> p.flatten(False)
        False
        >>> r = p.flatten(None)
        >>> r is None
        True
        >>> p.flatten(False)
        False
        >>> p.flatten([1, 2, 3, 4])
        [1, 2, 3, 4]
        >>> p.flatten((1,2,))[tags.TUPLE]
        [1, 2]
        >>> p.flatten({'key': 'value'}) == {'key': 'value'}
        True
        """
        if reset:
            self.reset()
        return self._flatten(obj)

    def _flatten(self, obj):

        #########################################
        # if obj is nonrecursive return immediately
        # for performance reasons we don't want to do recursive checks
        if PY2 and isinstance(obj, types.FileType):
            return self._flatten_file(obj)

        if util.is_bytes(obj):
            return self._flatten_bytestring(obj)

        if util.is_primitive(obj):
            return obj

        # Decimal is a primitive when use_decimal is True
        if self._use_decimal and isinstance(obj, decimal.Decimal):
            return obj
        #########################################

        self._push()
        return self._pop(self._flatten_obj(obj))

    def _max_reached(self):
        return self._depth == self._max_depth

    def _flatten_obj(self, obj):
        self._seen.append(obj)

        max_reached = self._max_reached()

        try:

            in_cycle = _in_cycle(obj, self._objs, max_reached, self.make_refs)
            if in_cycle:
                # break the cycle
                flatten_func = repr
            else:
                flatten_func = self._get_flattener(obj)

            if flatten_func is None:
                self._pickle_warning(obj)
                return None

            return flatten_func(obj)

        except (KeyboardInterrupt, SystemExit) as e:
            raise e
        except Exception as e:
            if self.fail_safe is None:
                raise e
            else:
                return self.fail_safe(e)

    def _list_recurse(self, obj):
        return [self._flatten(v) for v in obj]

    def _get_flattener(self, obj):

        list_recurse = self._list_recurse

        if util.is_list(obj):
            if self._mkref(obj):
                return list_recurse
            else:
                self._push()
                return self._getref

        # We handle tuples and sets by encoding them in a "(tuple|set)dict"
        if util.is_tuple(obj):
            if not self.unpicklable:
                return list_recurse
            return lambda obj: {tags.TUPLE: [self._flatten(v) for v in obj]}

        if util.is_set(obj):
            if not self.unpicklable:
                return list_recurse
            return lambda obj: {tags.SET: [self._flatten(v) for v in obj]}

        if util.is_dictionary(obj):
            return self._flatten_dict_obj

        if util.is_type(obj):
            return _mktyperef

        if util.is_object(obj):
            return self._ref_obj_instance

        if util.is_module_function(obj):
            return self._flatten_function

        # instance methods, lambdas, old style classes...
        self._pickle_warning(obj)
        return None

    def _ref_obj_instance(self, obj):
        """Reference an existing object or flatten if new"""
        if self.unpicklable:
            if self._mkref(obj):
                # We've never seen this object so return its
                # json representation.
                return self._flatten_obj_instance(obj)
            # We've seen this object before so place an object
            # reference tag in the data. This avoids infinite recursion
            # when processing cyclical objects.
            return self._getref(obj)
        else:
            max_reached = self._max_reached()
            in_cycle = _in_cycle(obj, self._objs, max_reached, False)
            if in_cycle:
                # A circular reference becomes None.
                return None

            self._mkref(obj)
            return self._flatten_obj_instance(obj)

    def _flatten_file(self, obj):
        """
        Special case file objects
        """
        assert not PY3 and isinstance(obj, types.FileType)
        return None

    def _flatten_bytestring(self, obj):
        if PY2:
            try:
                return obj.decode('utf-8')
            except UnicodeDecodeError:
                pass
        return {self._bytes_tag: self._bytes_encoder(obj)}

    def _flatten_obj_instance(self, obj):
        """Recursively flatten an instance and return a json-friendly dict"""
        data = {}
        has_class = hasattr(obj, '__class__')
        has_dict = hasattr(obj, '__dict__')
        has_slots = not has_dict and hasattr(obj, '__slots__')
        has_getnewargs = util.has_method(obj, '__getnewargs__')
        has_getnewargs_ex = util.has_method(obj, '__getnewargs_ex__')
        has_getinitargs = util.has_method(obj, '__getinitargs__')
        has_reduce, has_reduce_ex = util.has_reduce(obj)

        # Support objects with __getstate__(); this ensures that
        # both __setstate__() and __getstate__() are implemented
        has_getstate = hasattr(obj, '__getstate__')
        # not using has_method since __getstate__() is handled separately below

        if has_class:
            cls = obj.__class__
        else:
            cls = type(obj)

        # Check for a custom handler
        class_name = util.importable_name(cls)
        handler = handlers.get(cls, handlers.get(class_name))
        if handler is not None:
            if self.unpicklable:
                data[tags.OBJECT] = class_name
            return handler(self).flatten(obj, data)

        reduce_val = None

        if self.unpicklable:
            if has_reduce and not has_reduce_ex:
                try:
                    reduce_val = obj.__reduce__()
                except TypeError:
                    # A lot of builtin types have a reduce which
                    # just raises a TypeError
                    # we ignore those
                    pass

            # test for a reduce implementation, and redirect before
            # doing anything else if that is what reduce requests
            elif has_reduce_ex:
                try:
                    # we're implementing protocol 2
                    reduce_val = obj.__reduce_ex__(2)
                except TypeError:
                    # A lot of builtin types have a reduce which
                    # just raises a TypeError
                    # we ignore those
                    pass

            if reduce_val and isinstance(reduce_val, string_types):
                try:
                    varpath = iter(reduce_val.split('.'))
                    # curmod will be transformed by the
                    # loop into the value to pickle
                    curmod = sys.modules[next(varpath)]
                    for modname in varpath:
                        curmod = getattr(curmod, modname)
                        # replace obj with value retrieved
                    return self._flatten(curmod)
                except KeyError:
                    # well, we can't do anything with that, so we ignore it
                    pass

            elif reduce_val:
                # at this point, reduce_val should be some kind of iterable
                # pad out to len 5
                rv_as_list = list(reduce_val)
                insufficiency = 5 - len(rv_as_list)
                if insufficiency:
                    rv_as_list += [None] * insufficiency

                if getattr(rv_as_list[0], '__name__', '') == '__newobj__':
                    rv_as_list[0] = tags.NEWOBJ

                f, args, state, listitems, dictitems = rv_as_list

                # check that getstate/setstate is sane
                if not (
                    state
                    and hasattr(obj, '__getstate__')
                    and not hasattr(obj, '__setstate__')
                    and not isinstance(obj, dict)
                ):
                    # turn iterators to iterables for convenient serialization
                    if rv_as_list[3]:
                        rv_as_list[3] = tuple(rv_as_list[3])

                    if rv_as_list[4]:
                        rv_as_list[4] = tuple(rv_as_list[4])

                    reduce_args = list(map(self._flatten, rv_as_list))
                    last_index = len(reduce_args) - 1
                    while last_index >= 2 and reduce_args[last_index] is None:
                        last_index -= 1
                    data[tags.REDUCE] = reduce_args[: last_index + 1]

                    return data

        if has_class and not util.is_module(obj):
            if self.unpicklable:
                data[tags.OBJECT] = class_name

            if has_getnewargs_ex:
                data[tags.NEWARGSEX] = list(map(self._flatten, obj.__getnewargs_ex__()))

            if has_getnewargs and not has_getnewargs_ex:
                data[tags.NEWARGS] = self._flatten(obj.__getnewargs__())

            if has_getinitargs:
                data[tags.INITARGS] = self._flatten(obj.__getinitargs__())

        if has_getstate:
            try:
                state = obj.__getstate__()
            except TypeError:
                # Has getstate but it cannot be called, e.g. file descriptors
                # in Python3
                self._pickle_warning(obj)
                return None
            else:
                return self._getstate(state, data)

        if util.is_module(obj):
            if self.unpicklable:
                data[tags.REPR] = '{name}/{name}'.format(name=obj.__name__)
            else:
                data = compat.ustr(obj)
            return data

        if util.is_dictionary_subclass(obj):
            self._flatten_dict_obj(obj, data)
            return data

        if util.is_sequence_subclass(obj):
            return self._flatten_sequence_obj(obj, data)

        if util.is_iterator(obj):
            # force list in python 3
            data[tags.ITERATOR] = list(map(self._flatten, islice(obj, self._max_iter)))
            return data

        if has_dict:
            # Support objects that subclass list and set
            if util.is_sequence_subclass(obj):
                return self._flatten_sequence_obj(obj, data)

            # hack for zope persistent objects; this unghostifies the object
            getattr(obj, '_', None)
            return self._flatten_dict_obj(obj.__dict__, data)

        if has_slots:
            return self._flatten_newstyle_with_slots(obj, data)

        # catchall return for data created above without a return
        # (e.g. __getnewargs__ is not supposed to be the end of the story)
        if data:
            return data

        self._pickle_warning(obj)
        return None

    def _flatten_function(self, obj):
        if self.unpicklable:
            data = {tags.FUNCTION: util.importable_name(obj)}
        else:
            data = None

        return data

    def _flatten_dict_obj(self, obj, data=None):
        """Recursively call flatten() and return json-friendly dict"""
        if data is None:
            data = obj.__class__()

        # If we allow non-string keys then we have to do a two-phase
        # encoding to ensure that the reference IDs are deterministic.
        if self.keys:
            # Phase 1: serialize regular objects, ignore fancy keys.
            flatten = self._flatten_string_key_value_pair
            for k, v in util.items(obj):
                flatten(k, v, data)

            # Phase 2: serialize non-string keys.
            flatten = self._flatten_non_string_key_value_pair
            for k, v in util.items(obj):
                flatten(k, v, data)
        else:
            # If we have string keys only then we only need a single pass.
            flatten = self._flatten_key_value_pair
            for k, v in util.items(obj):
                flatten(k, v, data)

        # the collections.defaultdict protocol
        if hasattr(obj, 'default_factory') and callable(obj.default_factory):
            factory = obj.default_factory
            if util.is_type(factory):
                # Reference the class/type
                value = _mktyperef(factory)
            else:
                # The factory is not a type and could reference e.g. functions
                # or even the object instance itself, which creates a cycle.
                if self._mkref(factory):
                    # We've never seen this object before so pickle it in-place.
                    # Create an instance from the factory and assume that the
                    # resulting instance is a suitable exemplar.
                    value = self._flatten_obj_instance(handlers.CloneFactory(factory()))
                else:
                    # We've seen this object before.
                    # Break the cycle by emitting a reference.
                    value = self._getref(factory)
            data['default_factory'] = value

        # Sub-classes of dict
        if hasattr(obj, '__dict__') and self.unpicklable:
            dict_data = {}
            self._flatten_dict_obj(obj.__dict__, dict_data)
            data['__dict__'] = dict_data

        return data

    def _flatten_obj_attrs(self, obj, attrs, data):
        flatten = self._flatten_key_value_pair
        ok = False
        for k in attrs:
            try:
                value = getattr(obj, k)
                flatten(k, value, data)
            except AttributeError:
                # The attribute may have been deleted
                continue
            ok = True
        return ok

    def _flatten_newstyle_with_slots(self, obj, data):
        """Return a json-friendly dict for new-style objects with __slots__."""
        allslots = [
            _wrap_string_slot(getattr(cls, '__slots__', tuple()))
            for cls in obj.__class__.mro()
        ]

        if not self._flatten_obj_attrs(obj, chain(*allslots), data):
            attrs = [
                x for x in dir(obj) if not x.startswith('__') and not x.endswith('__')
            ]
            self._flatten_obj_attrs(obj, attrs, data)

        return data

    def _flatten_key_value_pair(self, k, v, data):
        """Flatten a key/value pair into the passed-in dictionary."""
        if not util.is_picklable(k, v):
            return data

        if k is None:
            k = 'null'  # for compatibility with common json encoders

        if self.numeric_keys and isinstance(k, numeric_types):
            pass
        elif not isinstance(k, string_types):
            try:
                k = repr(k)
            except Exception:
                k = compat.ustr(k)

        data[k] = self._flatten(v)
        return data

    def _flatten_non_string_key_value_pair(self, k, v, data):
        """Flatten only non-string key/value pairs"""
        if not util.is_picklable(k, v):
            return data
        if self.keys and not isinstance(k, string_types):
            k = self._escape_key(k)
            data[k] = self._flatten(v)
        return data

    def _flatten_string_key_value_pair(self, k, v, data):
        """Flatten string key/value pairs only."""
        if not util.is_picklable(k, v):
            return data
        if self.keys:
            if not isinstance(k, string_types):
                return data
            elif k.startswith(tags.JSON_KEY):
                k = self._escape_key(k)
        else:
            if k is None:
                k = 'null'  # for compatibility with common json encoders

            if self.numeric_keys and isinstance(k, numeric_types):
                pass
            elif not isinstance(k, string_types):
                try:
                    k = repr(k)
                except Exception:
                    k = compat.ustr(k)

        data[k] = self._flatten(v)
        return data

    def _flatten_sequence_obj(self, obj, data):
        """Return a json-friendly dict for a sequence subclass."""
        if hasattr(obj, '__dict__'):
            self._flatten_dict_obj(obj.__dict__, data)
        value = [self._flatten(v) for v in obj]
        if self.unpicklable:
            data[tags.SEQ] = value
        else:
            return value
        return data

    def _escape_key(self, k):
        return tags.JSON_KEY + encode(
            k,
            reset=False,
            keys=True,
            context=self,
            backend=self.backend,
            make_refs=self.make_refs,
        )

    def _getstate(self, obj, data):
        state = self._flatten(obj)
        if self.unpicklable:
            data[tags.STATE] = state
        else:
            data = state
        return data

    def _pickle_warning(self, obj):
        if self.warn:
            msg = 'jsonpickle cannot pickle %r: replaced with None' % obj
            warnings.warn(msg)


def _in_cycle(obj, objs, max_reached, make_refs):
    return (
        max_reached or (not make_refs and id(obj) in objs)
    ) and not util.is_primitive(obj)


def _mktyperef(obj):
    """Return a typeref dictionary

    >>> _mktyperef(AssertionError) == {'py/type': 'builtins.AssertionError'}
    True

    """
    return {tags.TYPE: util.importable_name(obj)}


def _wrap_string_slot(string):
    """Converts __slots__ = 'a' into __slots__ = ('a',)"""
    if isinstance(string, string_types):
        return (string,)
    return string
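A short sketch of the encode() options documented above, restricted to behaviour the module's own doctests demonstrate (max_depth falling back to repr(), keys=True preserving non-string keys):

    import jsonpickle

    jsonpickle.encode({'foo': [1, 2, [3, 4]]}, max_depth=1)
    # -> '{"foo": "[1, 2, [3, 4]]"}'  (anything deeper than max_depth is repr()'d)

    frozen = jsonpickle.encode({1: 'one'}, keys=True)  # non-string dict key survives
    assert jsonpickle.decode(frozen, keys=True) == {1: 'one'}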
52 matteo_env/Lib/site-packages/jsonpickle/tags.py (Normal file)
@@ -0,0 +1,52 @@
"""The jsonpickle.tags module provides the custom tags
|
||||
used for pickling and unpickling Python objects.
|
||||
|
||||
These tags are keys into the flattened dictionaries
|
||||
created by the Pickler class. The Unpickler uses
|
||||
these custom key names to identify dictionaries
|
||||
that need to be specially handled.
|
||||
"""
|
||||
from __future__ import absolute_import, division, unicode_literals
|
||||
|
||||
|
||||
BYTES = 'py/bytes'
|
||||
B64 = 'py/b64'
|
||||
B85 = 'py/b85'
|
||||
FUNCTION = 'py/function'
|
||||
ID = 'py/id'
|
||||
INITARGS = 'py/initargs'
|
||||
ITERATOR = 'py/iterator'
|
||||
JSON_KEY = 'json://'
|
||||
NEWARGS = 'py/newargs'
|
||||
NEWARGSEX = 'py/newargsex'
|
||||
NEWOBJ = 'py/newobj'
|
||||
OBJECT = 'py/object'
|
||||
REDUCE = 'py/reduce'
|
||||
REF = 'py/ref'
|
||||
REPR = 'py/repr'
|
||||
SEQ = 'py/seq'
|
||||
SET = 'py/set'
|
||||
STATE = 'py/state'
|
||||
TUPLE = 'py/tuple'
|
||||
TYPE = 'py/type'
|
||||
|
||||
# All reserved tag names
|
||||
RESERVED = {
|
||||
BYTES,
|
||||
FUNCTION,
|
||||
ID,
|
||||
INITARGS,
|
||||
ITERATOR,
|
||||
NEWARGS,
|
||||
NEWARGSEX,
|
||||
NEWOBJ,
|
||||
OBJECT,
|
||||
REDUCE,
|
||||
REF,
|
||||
REPR,
|
||||
SEQ,
|
||||
SET,
|
||||
STATE,
|
||||
TUPLE,
|
||||
TYPE,
|
||||
}
|
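These tags appear as the dictionary keys of the flattened output. A sketch (the exact element order for the set depends on set iteration):

    import jsonpickle

    jsonpickle.encode((1, 2))   # -> '{"py/tuple": [1, 2]}'
    jsonpickle.encode({1, 2})   # -> '{"py/set": [1, 2]}'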
752 matteo_env/Lib/site-packages/jsonpickle/unpickler.py (Normal file)
@@ -0,0 +1,752 @@
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
|
||||
# Copyright (C) 2009-2018 David Aguilar (davvid -at- gmail.com)
|
||||
# All rights reserved.
|
||||
#
|
||||
# This software is licensed as described in the file COPYING, which
|
||||
# you should have received as part of this distribution.
|
||||
from __future__ import absolute_import, division, unicode_literals
|
||||
import quopri
|
||||
import sys
|
||||
|
||||
from . import compat
|
||||
from . import util
|
||||
from . import tags
|
||||
from . import handlers
|
||||
from .compat import numeric_types
|
||||
from .backend import json
|
||||
|
||||
|
||||
def decode(
|
||||
string, backend=None, context=None, keys=False, reset=True, safe=False, classes=None
|
||||
):
|
||||
"""Convert a JSON string into a Python object.
|
||||
|
||||
The keyword argument 'keys' defaults to False.
|
||||
If set to True then jsonpickle will decode non-string dictionary keys
|
||||
into python objects via the jsonpickle protocol.
|
||||
|
||||
The keyword argument 'classes' defaults to None.
|
||||
If set to a single class, or a sequence (list, set, tuple) of classes,
|
||||
then the classes will be made available when constructing objects. This
|
||||
can be used to give jsonpickle access to local classes that are not
|
||||
available through the global module import scope.
|
||||
|
||||
>>> decode('"my string"') == 'my string'
|
||||
True
|
||||
>>> decode('36')
|
||||
36
|
||||
"""
|
||||
backend = backend or json
|
||||
context = context or Unpickler(keys=keys, backend=backend, safe=safe)
|
||||
data = backend.decode(string)
|
||||
return context.restore(data, reset=reset, classes=classes)
|
||||
|
||||
|
||||
def _safe_hasattr(obj, attr):
|
||||
"""Workaround unreliable hasattr() availability on sqlalchemy objects"""
|
||||
try:
|
||||
object.__getattribute__(obj, attr)
|
||||
return True
|
||||
except AttributeError:
|
||||
return False
|
||||
|
||||
|
||||
def _is_json_key(key):
|
||||
"""Has this key a special object that has been encoded to JSON?"""
|
||||
return isinstance(key, compat.string_types) and key.startswith(tags.JSON_KEY)
|
||||
|
||||
|
||||
class _Proxy(object):
|
||||
"""Proxies are dummy objects that are later replaced by real instances
|
||||
|
||||
The `restore()` function has to solve a tricky problem when pickling
|
||||
objects with cyclical references -- the parent instance does not yet
|
||||
exist.
|
||||
|
||||
The problem is that `__getnewargs__()`, `__getstate__()`, custom handlers,
|
||||
and cyclical objects graphs are allowed to reference the yet-to-be-created
|
||||
object via the referencing machinery.
|
||||
|
||||
In other words, objects are allowed to depend on themselves for
|
||||
construction!
|
||||
|
||||
We solve this problem by placing dummy Proxy objects into the referencing
|
||||
machinery so that we can construct the child objects before constructing
|
||||
the parent. Objects are initially created with Proxy attribute values
|
||||
instead of real references.
|
||||
|
||||
We collect all objects that contain references to proxies and run
|
||||
a final sweep over them to swap in the real instance. This is done
|
||||
at the very end of the top-level `restore()`.
|
||||
|
||||
The `instance` attribute below is replaced with the real instance
|
||||
after `__new__()` has been used to construct the object and is used
|
||||
when swapping proxies with real instances.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.instance = None
|
||||
|
||||
def get(self):
|
||||
return self.instance
|
||||
|
||||
def reset(self, instance):
|
||||
self.instance = instance
|
||||
|
||||
|
||||
class _IDProxy(_Proxy):
|
||||
def __init__(self, objs, index):
|
||||
self._index = index
|
||||
self._objs = objs
|
||||
|
||||
def get(self):
|
||||
return self._objs[self._index]
|
||||
|
||||
|
||||
def _obj_setattr(obj, attr, proxy):
|
||||
setattr(obj, attr, proxy.get())
|
||||
|
||||
|
||||
def _obj_setvalue(obj, idx, proxy):
|
||||
obj[idx] = proxy.get()
|
||||
|
||||
|
||||
class Unpickler(object):
|
||||
def __init__(self, backend=None, keys=False, safe=False):
|
||||
self.backend = backend or json
|
||||
self.keys = keys
|
||||
self.safe = safe
|
||||
|
||||
self.reset()
|
||||
|
||||
def reset(self):
|
||||
"""Resets the object's internal state."""
|
||||
# Map reference names to object instances
|
||||
self._namedict = {}
|
||||
|
||||
# The stack of names traversed for child objects
|
||||
self._namestack = []
|
||||
|
||||
# Map of objects to their index in the _objs list
|
||||
self._obj_to_idx = {}
|
||||
self._objs = []
|
||||
self._proxies = []
|
||||
|
||||
# Extra local classes not accessible globally
|
||||
self._classes = {}
|
||||
|
||||
def restore(self, obj, reset=True, classes=None):
|
||||
"""Restores a flattened object to its original python state.
|
||||
|
||||
Simply returns any of the basic builtin types
|
||||
|
||||
>>> u = Unpickler()
|
||||
>>> u.restore('hello world') == 'hello world'
|
||||
True
|
||||
>>> u.restore({'key': 'value'}) == {'key': 'value'}
|
||||
True
|
||||
|
||||
"""
|
||||
if reset:
|
||||
self.reset()
|
||||
if classes:
|
||||
self.register_classes(classes)
|
||||
value = self._restore(obj)
|
||||
if reset:
|
||||
self._swap_proxies()
|
||||
return value
|
||||
|
||||
def register_classes(self, classes):
|
||||
"""Register one or more classes
|
||||
|
||||
:param classes: sequence of classes or a single class to register
|
||||
|
||||
"""
|
||||
if isinstance(classes, (list, tuple, set)):
|
||||
for cls in classes:
|
||||
self.register_classes(cls)
|
||||
else:
|
||||
self._classes[util.importable_name(classes)] = classes
|
||||
|
||||
def _swap_proxies(self):
|
||||
"""Replace proxies with their corresponding instances"""
|
||||
for (obj, attr, proxy, method) in self._proxies:
|
||||
method(obj, attr, proxy)
|
||||
self._proxies = []
|
||||
|
||||
def _restore(self, obj):
|
||||
if has_tag(obj, tags.B64):
|
||||
restore = self._restore_base64
|
||||
elif has_tag(obj, tags.B85):
|
||||
restore = self._restore_base85
|
||||
elif has_tag(obj, tags.BYTES): # Backwards compatibility
|
||||
restore = self._restore_quopri
|
||||
elif has_tag(obj, tags.ID):
|
||||
restore = self._restore_id
|
||||
elif has_tag(obj, tags.REF): # Backwards compatibility
|
||||
restore = self._restore_ref
|
||||
elif has_tag(obj, tags.ITERATOR):
|
||||
restore = self._restore_iterator
|
||||
elif has_tag(obj, tags.TYPE):
|
||||
restore = self._restore_type
|
||||
elif has_tag(obj, tags.REPR): # Backwards compatibility
|
||||
restore = self._restore_repr
|
||||
elif has_tag(obj, tags.REDUCE):
|
||||
restore = self._restore_reduce
|
||||
elif has_tag(obj, tags.OBJECT):
|
||||
restore = self._restore_object
|
||||
elif has_tag(obj, tags.FUNCTION):
|
||||
restore = self._restore_function
|
||||
elif util.is_list(obj):
|
||||
restore = self._restore_list
|
||||
elif has_tag(obj, tags.TUPLE):
|
||||
restore = self._restore_tuple
|
||||
elif has_tag(obj, tags.SET):
|
||||
restore = self._restore_set
|
||||
elif util.is_dictionary(obj):
|
||||
restore = self._restore_dict
|
||||
else:
|
||||
|
||||
def restore(x):
|
||||
return x
|
||||
|
||||
return restore(obj)
|
||||
|
||||
def _restore_base64(self, obj):
|
||||
return util.b64decode(obj[tags.B64].encode('utf-8'))
|
||||
|
||||
def _restore_base85(self, obj):
|
||||
return util.b85decode(obj[tags.B85].encode('utf-8'))
|
||||
|
||||
#: For backwards compatibility with bytes data produced by older versions
|
||||
def _restore_quopri(self, obj):
|
||||
return quopri.decodestring(obj[tags.BYTES].encode('utf-8'))
|
||||
|
||||
def _restore_iterator(self, obj):
|
||||
return iter(self._restore_list(obj[tags.ITERATOR]))
|
||||
|
||||
def _restore_reduce(self, obj):
|
||||
"""
|
||||
Supports restoring with all elements of __reduce__ as per pep 307.
|
||||
Assumes that iterator items (the last two) are represented as lists
|
||||
as per pickler implementation.
|
||||
"""
|
||||
proxy = _Proxy()
|
||||
self._mkref(proxy)
|
||||
reduce_val = list(map(self._restore, obj[tags.REDUCE]))
|
||||
if len(reduce_val) < 5:
|
||||
reduce_val.extend([None] * (5 - len(reduce_val)))
|
||||
f, args, state, listitems, dictitems = reduce_val
|
||||
|
||||
if f == tags.NEWOBJ or getattr(f, '__name__', '') == '__newobj__':
|
||||
# mandated special case
|
||||
cls = args[0]
|
||||
if not isinstance(cls, type):
|
||||
cls = self._restore(cls)
|
||||
stage1 = cls.__new__(cls, *args[1:])
|
||||
else:
|
||||
stage1 = f(*args)
|
||||
|
||||
if state:
|
||||
try:
|
||||
stage1.__setstate__(state)
|
||||
except AttributeError:
|
||||
# it's fine - we'll try the prescribed default methods
|
||||
try:
|
||||
# we can't do a straight update here because we
|
||||
# need object identity of the state dict to be
|
||||
# preserved so that _swap_proxies works out
|
||||
for k, v in stage1.__dict__.items():
|
||||
state.setdefault(k, v)
|
||||
stage1.__dict__ = state
|
||||
except AttributeError:
|
||||
# next prescribed default
|
||||
try:
|
||||
for k, v in state.items():
|
||||
setattr(stage1, k, v)
|
||||
except Exception:
|
||||
dict_state, slots_state = state
|
||||
if dict_state:
|
||||
stage1.__dict__.update(dict_state)
|
||||
if slots_state:
|
||||
for k, v in slots_state.items():
|
||||
setattr(stage1, k, v)
|
||||
|
||||
if listitems:
|
||||
# should be lists if not None
|
||||
try:
|
||||
stage1.extend(listitems)
|
||||
except AttributeError:
|
||||
for x in listitems:
|
||||
stage1.append(x)
|
||||
|
||||
if dictitems:
|
||||
for k, v in dictitems:
|
||||
stage1.__setitem__(k, v)
|
||||
|
||||
proxy.reset(stage1)
|
||||
self._swapref(proxy, stage1)
|
||||
return stage1
|
||||
|
||||
def _restore_id(self, obj):
|
||||
try:
|
||||
idx = obj[tags.ID]
|
||||
return self._objs[idx]
|
||||
except IndexError:
|
||||
return _IDProxy(self._objs, idx)
|
||||
|
||||
def _restore_ref(self, obj):
|
||||
return self._namedict.get(obj[tags.REF])
|
||||
|
||||
def _restore_type(self, obj):
|
||||
typeref = loadclass(obj[tags.TYPE], classes=self._classes)
|
||||
if typeref is None:
|
||||
return obj
|
||||
return typeref
|
||||
|
||||
def _restore_repr(self, obj):
|
||||
if self.safe:
|
||||
# eval() is not allowed in safe mode
|
||||
return None
|
||||
obj = loadrepr(obj[tags.REPR])
|
||||
return self._mkref(obj)
|
||||
|
||||
def _restore_object(self, obj):
|
||||
class_name = obj[tags.OBJECT]
|
||||
cls = loadclass(class_name, classes=self._classes)
|
||||
handler = handlers.get(cls, handlers.get(class_name))
|
||||
if handler is not None: # custom handler
|
||||
proxy = _Proxy()
|
||||
self._mkref(proxy)
|
||||
instance = handler(self).restore(obj)
|
||||
proxy.reset(instance)
|
||||
self._swapref(proxy, instance)
|
||||
return instance
|
||||
|
||||
if cls is None:
|
||||
return self._mkref(obj)
|
||||
|
||||
return self._restore_object_instance(obj, cls)
|
||||
|
||||
def _restore_function(self, obj):
|
||||
return loadclass(obj[tags.FUNCTION], classes=self._classes)
|
||||
|
||||
def _loadfactory(self, obj):
|
||||
try:
|
||||
default_factory = obj['default_factory']
|
||||
except KeyError:
|
||||
return None
|
||||
del obj['default_factory']
|
||||
return self._restore(default_factory)
|
||||
|
||||
def _restore_object_instance(self, obj, cls):
|
||||
# This is a placeholder proxy object which allows child objects to
|
||||
# reference the parent object before it has been instantiated.
|
||||
proxy = _Proxy()
|
||||
self._mkref(proxy)
|
||||
|
||||
# An object can install itself as its own factory, so load the factory
|
||||
# after the instance is available for referencing.
|
||||
factory = self._loadfactory(obj)
|
||||
|
||||
if has_tag(obj, tags.NEWARGSEX):
|
||||
args, kwargs = obj[tags.NEWARGSEX]
|
||||
else:
|
||||
args = getargs(obj, classes=self._classes)
|
||||
kwargs = {}
|
||||
if args:
|
||||
args = self._restore(args)
|
||||
if kwargs:
|
||||
kwargs = self._restore(kwargs)
|
||||
|
||||
is_oldstyle = not (isinstance(cls, type) or getattr(cls, '__meta__', None))
|
||||
try:
|
||||
if (not is_oldstyle) and hasattr(cls, '__new__'):
|
||||
# new style classes
|
||||
if factory:
|
||||
instance = cls.__new__(cls, factory, *args, **kwargs)
|
||||
instance.default_factory = factory
|
||||
else:
|
||||
instance = cls.__new__(cls, *args, **kwargs)
|
||||
else:
|
||||
instance = object.__new__(cls)
|
||||
except TypeError: # old-style classes
|
||||
is_oldstyle = True
|
||||
|
||||
if is_oldstyle:
|
||||
try:
|
||||
instance = cls(*args)
|
||||
except TypeError: # fail gracefully
|
||||
try:
|
||||
instance = make_blank_classic(cls)
|
||||
except Exception: # fail gracefully
|
||||
return self._mkref(obj)
|
||||
|
||||
proxy.reset(instance)
|
||||
self._swapref(proxy, instance)
|
||||
|
||||
if isinstance(instance, tuple):
|
||||
return instance
|
||||
|
||||
instance = self._restore_object_instance_variables(obj, instance)
|
||||
|
||||
if _safe_hasattr(instance, 'default_factory') and isinstance(
|
||||
instance.default_factory, _Proxy
|
||||
):
|
||||
instance.default_factory = instance.default_factory.get()
|
||||
|
||||
return instance
|
||||
|
||||
def _restore_from_dict(self, obj, instance, ignorereserved=True):
|
||||
restore_key = self._restore_key_fn()
|
||||
method = _obj_setattr
|
||||
deferred = {}
|
||||
|
||||
for k, v in util.items(obj):
|
||||
# ignore the reserved attribute
|
||||
if ignorereserved and k in tags.RESERVED:
|
||||
continue
|
||||
if isinstance(k, numeric_types):
|
||||
str_k = k.__str__()
|
||||
else:
|
||||
str_k = k
|
||||
self._namestack.append(str_k)
|
||||
k = restore_key(k)
|
||||
# step into the namespace
|
||||
value = self._restore(v)
|
||||
if util.is_noncomplex(instance) or util.is_dictionary_subclass(instance):
|
||||
try:
|
||||
if k == '__dict__':
|
||||
setattr(instance, k, value)
|
||||
else:
|
||||
instance[k] = value
|
||||
except TypeError:
|
||||
# Immutable object, must be constructed in one shot
|
||||
if k != '__dict__':
|
||||
deferred[k] = value
|
||||
self._namestack.pop()
|
||||
continue
|
||||
else:
|
||||
setattr(instance, k, value)
|
||||
|
||||
# This instance has an instance variable named `k` that is
|
||||
# currently a proxy and must be replaced
|
||||
if isinstance(value, _Proxy):
|
||||
self._proxies.append((instance, k, value, method))
|
||||
|
||||
# step out
|
||||
self._namestack.pop()
|
||||
|
||||
if deferred:
|
||||
# SQLAlchemy Immutable mappings must be constructed in one shot
|
||||
instance = instance.__class__(deferred)
|
||||
|
||||
return instance
|
||||
|
||||
def _restore_object_instance_variables(self, obj, instance):
|
||||
instance = self._restore_from_dict(obj, instance)
|
||||
|
||||
# Handle list and set subclasses
|
||||
if has_tag(obj, tags.SEQ):
|
||||
if hasattr(instance, 'append'):
|
||||
for v in obj[tags.SEQ]:
|
||||
instance.append(self._restore(v))
|
||||
elif hasattr(instance, 'add'):
|
||||
for v in obj[tags.SEQ]:
|
||||
instance.add(self._restore(v))
|
||||
|
||||
if has_tag(obj, tags.STATE):
|
||||
instance = self._restore_state(obj, instance)
|
||||
|
||||
return instance
|
||||
|
||||
def _restore_state(self, obj, instance):
|
||||
state = self._restore(obj[tags.STATE])
|
||||
has_slots = (
|
||||
isinstance(state, tuple) and len(state) == 2 and isinstance(state[1], dict)
|
||||
)
|
||||
has_slots_and_dict = has_slots and isinstance(state[0], dict)
|
||||
if hasattr(instance, '__setstate__'):
|
||||
instance.__setstate__(state)
|
||||
elif isinstance(state, dict):
|
||||
# implements described default handling
|
||||
# of state for object with instance dict
|
||||
# and no slots
|
||||
instance = self._restore_from_dict(state, instance, ignorereserved=False)
|
||||
elif has_slots:
|
||||
instance = self._restore_from_dict(state[1], instance, ignorereserved=False)
|
||||
if has_slots_and_dict:
|
||||
instance = self._restore_from_dict(
|
||||
state[0], instance, ignorereserved=False
|
||||
)
|
||||
elif not hasattr(instance, '__getnewargs__') and not hasattr(
|
||||
instance, '__getnewargs_ex__'
|
||||
):
|
||||
# __setstate__ is not implemented so that means that the best
|
||||
# we can do is return the result of __getstate__() rather than
|
||||
# return an empty shell of an object.
|
||||
# However, if there were newargs, it's not an empty shell
|
||||
instance = state
|
||||
return instance
|
||||
|
||||
def _restore_list(self, obj):
|
||||
parent = []
|
||||
self._mkref(parent)
|
||||
children = [self._restore(v) for v in obj]
|
||||
parent.extend(children)
|
||||
method = _obj_setvalue
|
||||
proxies = [
|
||||
(parent, idx, value, method)
|
||||
for idx, value in enumerate(parent)
|
||||
if isinstance(value, _Proxy)
|
||||
]
|
||||
self._proxies.extend(proxies)
|
||||
return parent
|
||||
|
||||
def _restore_tuple(self, obj):
|
||||
return tuple([self._restore(v) for v in obj[tags.TUPLE]])
|
||||
|
||||
def _restore_set(self, obj):
|
||||
return {self._restore(v) for v in obj[tags.SET]}
|
||||
|
||||
def _restore_dict(self, obj):
|
||||
data = {}
|
||||
|
||||
# If we are decoding dicts that can have non-string keys then we
|
||||
# need to do a two-phase decode where the non-string keys are
|
||||
# processed last. This ensures a deterministic order when
|
||||
# assigning object IDs for references.
|
||||
if self.keys:
|
||||
# Phase 1: regular non-special keys.
|
||||
for k, v in util.items(obj):
|
||||
if _is_json_key(k):
|
||||
continue
|
||||
if isinstance(k, numeric_types):
|
||||
str_k = k.__str__()
|
||||
else:
|
||||
str_k = k
|
||||
self._namestack.append(str_k)
|
||||
data[k] = self._restore(v)
|
||||
|
||||
self._namestack.pop()
|
||||
|
||||
# Phase 2: object keys only.
|
||||
for k, v in util.items(obj):
|
||||
if not _is_json_key(k):
|
||||
continue
|
||||
self._namestack.append(k)
|
||||
|
||||
k = self._restore_pickled_key(k)
|
||||
data[k] = result = self._restore(v)
|
||||
# k is currently a proxy and must be replaced
|
||||
if isinstance(result, _Proxy):
|
||||
self._proxies.append((data, k, result, _obj_setvalue))
|
||||
|
||||
self._namestack.pop()
|
||||
else:
|
||||
# No special keys, thus we don't need to restore the keys either.
|
||||
for k, v in util.items(obj):
|
||||
if isinstance(k, numeric_types):
|
||||
str_k = k.__str__()
|
||||
else:
|
||||
str_k = k
|
||||
self._namestack.append(str_k)
|
||||
data[k] = self._restore(v)
|
||||
self._namestack.pop()
|
||||
return data
|
||||
|
||||
def _restore_key_fn(self):
|
||||
"""Return a callable that restores keys
|
||||
|
||||
This function is responsible for restoring non-string keys
|
||||
when we are decoding with `keys=True`.
|
||||
|
||||
"""
|
||||
# This function is called before entering a tight loop
|
||||
# where the returned function will be called.
|
||||
# We return a specific function after checking self.keys
|
||||
# instead of doing so in the body of the function to
|
||||
# avoid conditional branching inside a tight loop.
|
||||
if self.keys:
|
||||
restore_key = self._restore_pickled_key
|
||||
else:
|
||||
|
||||
def restore_key(key):
|
                return key

        return restore_key

    def _restore_pickled_key(self, key):
        """Restore a possibly pickled key"""
        if _is_json_key(key):
            key = decode(
                key[len(tags.JSON_KEY) :],
                backend=self.backend,
                context=self,
                keys=True,
                reset=False,
            )
        return key

    def _refname(self):
        """Calculates the name of the current location in the JSON stack.

        This is called as jsonpickle traverses the object structure to
        create references to previously-traversed objects. This allows
        cyclical data structures such as doubly-linked lists.
        jsonpickle ensures that duplicate python references to the same
        object results in only a single JSON object definition and
        special reference tags to represent each reference.

        >>> u = Unpickler()
        >>> u._namestack = []
        >>> u._refname() == '/'
        True
        >>> u._namestack = ['a']
        >>> u._refname() == '/a'
        True
        >>> u._namestack = ['a', 'b']
        >>> u._refname() == '/a/b'
        True

        """
        return '/' + '/'.join(self._namestack)

    def _mkref(self, obj):
        obj_id = id(obj)
        try:
            self._obj_to_idx[obj_id]
        except KeyError:
            self._obj_to_idx[obj_id] = len(self._objs)
            self._objs.append(obj)
            # Backwards compatibility: old versions of jsonpickle
            # produced "py/ref" references.
            self._namedict[self._refname()] = obj
        return obj

    def _swapref(self, proxy, instance):
        proxy_id = id(proxy)
        instance_id = id(instance)

        instance_index = self._obj_to_idx[proxy_id]
        self._obj_to_idx[instance_id] = instance_index
        del self._obj_to_idx[proxy_id]

        self._objs[instance_index] = instance
        self._namedict[self._refname()] = instance


def loadclass(module_and_name, classes=None):
    """Loads the module and returns the class.

    >>> cls = loadclass('datetime.datetime')
    >>> cls.__name__
    'datetime'

    >>> loadclass('does.not.exist')

    >>> loadclass('builtins.int')()
    0

    """
    # Check if the class exists in a caller-provided scope
    if classes:
        try:
            return classes[module_and_name]
        except KeyError:
            pass
    # Otherwise, load classes from globally-accessible imports
    names = module_and_name.split('.')
    # First assume that everything up to the last dot is the module name,
    # then try other splits to handle classes that are defined within
    # classes
    for up_to in range(len(names) - 1, 0, -1):
        module = util.untranslate_module_name('.'.join(names[:up_to]))
        try:
            __import__(module)
            obj = sys.modules[module]
            for class_name in names[up_to:]:
                obj = getattr(obj, class_name)
            return obj
        except (AttributeError, ImportError, ValueError):
            continue
    return None


def getargs(obj, classes=None):
    """Return arguments suitable for __new__()"""
    # Let saved newargs take precedence over everything
    if has_tag(obj, tags.NEWARGSEX):
        raise ValueError("__newargs_ex__ returns both args and kwargs")

    if has_tag(obj, tags.NEWARGS):
        return obj[tags.NEWARGS]

    if has_tag(obj, tags.INITARGS):
        return obj[tags.INITARGS]

    try:
        seq_list = obj[tags.SEQ]
        obj_dict = obj[tags.OBJECT]
    except KeyError:
        return []
    typeref = loadclass(obj_dict, classes=classes)
    if not typeref:
        return []
    if hasattr(typeref, '_fields'):
        if len(typeref._fields) == len(seq_list):
            return seq_list
    return []


class _trivialclassic:
    """
    A trivial class that can be instantiated with no args
    """


def make_blank_classic(cls):
    """
    Implement the mandated strategy for dealing with classic classes
    which cannot be instantiated without __getinitargs__ because they
    take parameters
    """
    instance = _trivialclassic()
    instance.__class__ = cls
    return instance


def loadrepr(reprstr):
    """Returns an instance of the object from the object's repr() string.
    It involves the dynamic specification of code.

    >>> obj = loadrepr('datetime/datetime.datetime.now()')
    >>> obj.__class__.__name__
    'datetime'

    """
    module, evalstr = reprstr.split('/')
    mylocals = locals()
    localname = module
    if '.' in localname:
        localname = module.split('.', 1)[0]
    mylocals[localname] = __import__(module)
    return eval(evalstr)

def has_tag(obj, tag):
    """Helper function that tests to see if the obj is a dictionary
    and contains a particular key/tag.

    >>> obj = {'test': 1}
    >>> has_tag(obj, 'test')
    True
    >>> has_tag(obj, 'fail')
    False

    >>> has_tag(42, 'fail')
    False

    """
    return type(obj) is dict and tag in obj

matteo_env/Lib/site-packages/jsonpickle/util.py (new file, 562 lines)
@@ -0,0 +1,562 @@
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009-2018 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.

"""Helper functions for pickling and unpickling. Most functions assist in
determining the type of an object.
"""
from __future__ import absolute_import, division, unicode_literals
import base64
import collections
import io
import operator
import time
import types
import inspect

from . import tags
from . import compat
from .compat import (
    abc_iterator,
    class_types,
    iterator_types,
    numeric_types,
    PY2,
    PY3,
    PY3_ORDERED_DICT,
)

if PY2:
    import __builtin__

SEQUENCES = (list, set, tuple)
SEQUENCES_SET = {list, set, tuple}
PRIMITIVES = {compat.ustr, bool, type(None)} | set(numeric_types)


def is_type(obj):
    """Returns True if obj is a reference to a type.

    >>> is_type(1)
    False

    >>> is_type(object)
    True

    >>> class Klass: pass
    >>> is_type(Klass)
    True
    """
    # use "isinstance" and not "is" to allow for metaclasses
    return isinstance(obj, class_types)


def has_method(obj, name):
    # false if attribute doesn't exist
    if not hasattr(obj, name):
        return False
    func = getattr(obj, name)

    # builtin descriptors like __getnewargs__
    if isinstance(func, types.BuiltinMethodType):
        return True

    # note that FunctionType has a different meaning in py2/py3
    if not isinstance(func, (types.MethodType, types.FunctionType)):
        return False

    # need to go through __dict__'s since in py3
    # methods are essentially descriptors

    # __class__ for old-style classes
    base_type = obj if is_type(obj) else obj.__class__
    original = None
    # there is no .mro() for old-style classes
    for subtype in inspect.getmro(base_type):
        original = vars(subtype).get(name)
        if original is not None:
            break

    # name not found in the mro
    if original is None:
        return False

    # static methods are always fine
    if isinstance(original, staticmethod):
        return True

    # at this point, the method has to be an instancemethod or a classmethod
    self_attr = '__self__' if PY3 else 'im_self'
    if not hasattr(func, self_attr):
        return False
    bound_to = getattr(func, self_attr)

    # class methods
    if isinstance(original, classmethod):
        return issubclass(base_type, bound_to)

    # bound methods
    return isinstance(obj, type(bound_to))
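
has_method has no doctest of its own, so here is a minimal sketch of the cases it distinguishes; the Example class and its method names are made up for illustration, assuming Python 3 semantics:

# Illustrative sketch only -- not part of the vendored file.
class Example:
    def bound(self): pass
    @staticmethod
    def static(): pass
    @classmethod
    def clsmethod(cls): pass

# has_method(Example(), 'bound')    -> True  (method bound to the instance)
# has_method(Example, 'bound')      -> False (plain function on the class, no __self__)
# has_method(Example, 'static')     -> True  (staticmethods always pass)
# has_method(Example, 'clsmethod')  -> True  (classmethod bound to the class itself)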


def is_object(obj):
    """Returns True if obj is a reference to an object instance.

    >>> is_object(1)
    True

    >>> is_object(object())
    True

    >>> is_object(lambda x: 1)
    False
    """
    return isinstance(obj, object) and not isinstance(
        obj, (type, types.FunctionType, types.BuiltinFunctionType)
    )

def is_primitive(obj):
    """Helper method to see if the object is a basic data type. Unicode strings,
    integers, longs, floats, booleans, and None are considered primitive
    and will return True when passed into *is_primitive()*

    >>> is_primitive(3)
    True
    >>> is_primitive([4,4])
    False
    """
    return type(obj) in PRIMITIVES


def is_dictionary(obj):
    """Helper method for testing if the object is a dictionary.

    >>> is_dictionary({'key':'value'})
    True

    """
    return type(obj) is dict


def is_sequence(obj):
    """Helper method to see if the object is a sequence (list, set, or tuple).

    >>> is_sequence([4])
    True

    """
    return type(obj) in SEQUENCES_SET


def is_list(obj):
    """Helper method to see if the object is a Python list.

    >>> is_list([4])
    True
    """
    return type(obj) is list


def is_set(obj):
    """Helper method to see if the object is a Python set.

    >>> is_set(set())
    True
    """
    return type(obj) is set


def is_bytes(obj):
    """Helper method to see if the object is a bytestring.

    >>> is_bytes(b'foo')
    True
    """
    return type(obj) is bytes


def is_unicode(obj):
    """Helper method to see if the object is a unicode string"""
    return type(obj) is compat.ustr


def is_tuple(obj):
    """Helper method to see if the object is a Python tuple.

    >>> is_tuple((1,))
    True
    """
    return type(obj) is tuple


def is_dictionary_subclass(obj):
    """Returns True if *obj* is a subclass of the dict type. *obj* must be
    a subclass and not the actual builtin dict.

    >>> class Temp(dict): pass
    >>> is_dictionary_subclass(Temp())
    True
    """
    # TODO: add UserDict
    return (
        hasattr(obj, '__class__')
        and issubclass(obj.__class__, dict)
        and type(obj) is not dict
    )


def is_sequence_subclass(obj):
    """Returns True if *obj* is a subclass of list, set or tuple.

    *obj* must be a subclass and not the actual builtin, such
    as list, set, tuple, etc.

    >>> class Temp(list): pass
    >>> is_sequence_subclass(Temp())
    True
    """
    return (
        hasattr(obj, '__class__')
        and (issubclass(obj.__class__, SEQUENCES) or is_list_like(obj))
        and not is_sequence(obj)
    )


def is_noncomplex(obj):
    """Returns True if *obj* is a special (weird) class, that is more complex
    than primitive data types, but is not a full object. Including:

        * :class:`~time.struct_time`
    """
    if type(obj) is time.struct_time:
        return True
    return False


def is_function(obj):
    """Returns true if passed a function

    >>> is_function(lambda x: 1)
    True

    >>> is_function(locals)
    True

    >>> def method(): pass
    >>> is_function(method)
    True

    >>> is_function(1)
    False
    """
    function_types = (
        types.FunctionType,
        types.MethodType,
        types.LambdaType,
        types.BuiltinFunctionType,
        types.BuiltinMethodType,
    )
    return type(obj) in function_types


def is_module_function(obj):
    """Return True if `obj` is a module-global function

    >>> import os
    >>> is_module_function(os.path.exists)
    True

    >>> is_module_function(lambda: None)
    False

    """

    return (
        hasattr(obj, '__class__')
        and isinstance(obj, (types.FunctionType, types.BuiltinFunctionType))
        and hasattr(obj, '__module__')
        and hasattr(obj, '__name__')
        and obj.__name__ != '<lambda>'
    )


def is_module(obj):
    """Returns True if passed a module

    >>> import os
    >>> is_module(os)
    True

    """
    return isinstance(obj, types.ModuleType)


def is_picklable(name, value):
    """Return True if an object can be pickled

    >>> import os
    >>> is_picklable('os', os)
    True

    >>> def foo(): pass
    >>> is_picklable('foo', foo)
    True

    >>> is_picklable('foo', lambda: None)
    False

    """
    if name in tags.RESERVED:
        return False
    return is_module_function(value) or not is_function(value)


def is_installed(module):
    """Tests to see if ``module`` is available on the sys.path

    >>> is_installed('sys')
    True
    >>> is_installed('hopefullythisisnotarealmodule')
    False

    """
    try:
        __import__(module)
        return True
    except ImportError:
        return False


def is_list_like(obj):
    return hasattr(obj, '__getitem__') and hasattr(obj, 'append')
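
is_list_like is pure duck typing; a quick sketch with a made-up class:

# Illustrative sketch only.
class Listish:
    def __getitem__(self, index): return index
    def append(self, item): pass

# is_list_like(Listish())  -> True  (has both __getitem__ and append)
# is_list_like((1, 2, 3))  -> False (tuples have __getitem__ but no append)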


def is_iterator(obj):
    is_file = PY2 and isinstance(obj, __builtin__.file)
    return (
        isinstance(obj, abc_iterator) and not isinstance(obj, io.IOBase) and not is_file
    )


def is_collections(obj):
    try:
        return type(obj).__module__ == 'collections'
    except Exception:
        return False


def is_reducible(obj):
    """
    Returns False for types which have special casing and should not
    have their __reduce__ methods used
    """
    # defaultdicts may contain functions which we cannot serialise
    if is_collections(obj) and not isinstance(obj, collections.defaultdict):
        return True
    return not (
        is_list(obj)
        or is_list_like(obj)
        or is_primitive(obj)
        or is_bytes(obj)
        or is_unicode(obj)
        or is_dictionary(obj)
        or is_sequence(obj)
        or is_set(obj)
        or is_tuple(obj)
        or is_dictionary_subclass(obj)
        or is_sequence_subclass(obj)
        or is_function(obj)
        or is_module(obj)
        or isinstance(getattr(obj, '__slots__', None), iterator_types)
        or type(obj) is object
        or obj is object
        or (is_type(obj) and obj.__module__ == 'datetime')
    )


def in_dict(obj, key, default=False):
    """
    Returns true if key exists in obj.__dict__; false if not in.
    If obj.__dict__ is absent, return default
    """
    return (key in obj.__dict__) if getattr(obj, '__dict__', None) else default


def in_slots(obj, key, default=False):
    """
    Returns true if key exists in obj.__slots__; false if not in.
    If obj.__slots__ is absent, return default
    """
    return (key in obj.__slots__) if getattr(obj, '__slots__', None) else default


def has_reduce(obj):
    """
    Tests if __reduce__ or __reduce_ex__ exists in the object dict or
    in the class dicts of every class in the MRO *except object*.

    Returns a tuple of booleans (has_reduce, has_reduce_ex)
    """

    if not is_reducible(obj) or is_type(obj):
        return (False, False)

    # in this case, reduce works and is desired, even though it
    # relies on the default object reduce
    if is_noncomplex(obj):
        return (False, True)

    has_reduce = False
    has_reduce_ex = False

    REDUCE = '__reduce__'
    REDUCE_EX = '__reduce_ex__'

    # For object instance
    has_reduce = in_dict(obj, REDUCE) or in_slots(obj, REDUCE)
    has_reduce_ex = in_dict(obj, REDUCE_EX) or in_slots(obj, REDUCE_EX)

    # turn to the MRO
    for base in type(obj).__mro__:
        if is_reducible(base):
            has_reduce = has_reduce or in_dict(base, REDUCE)
            has_reduce_ex = has_reduce_ex or in_dict(base, REDUCE_EX)
        if has_reduce and has_reduce_ex:
            return (has_reduce, has_reduce_ex)

    # for things that don't have a proper dict but can be
    # getattred (rare, but includes some builtins)
    cls = type(obj)
    object_reduce = getattr(object, REDUCE)
    object_reduce_ex = getattr(object, REDUCE_EX)
    if not has_reduce:
        has_reduce_cls = getattr(cls, REDUCE, False)
        if has_reduce_cls is not object_reduce:
            has_reduce = has_reduce_cls

    if not has_reduce_ex:
        has_reduce_ex_cls = getattr(cls, REDUCE_EX, False)
        if has_reduce_ex_cls is not object_reduce_ex:
            has_reduce_ex = has_reduce_ex_cls

    return (has_reduce, has_reduce_ex)
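
A sketch of what the returned tuple looks like for a class that defines its own __reduce__; the Custom class is invented for illustration:

# Illustrative sketch only.
class Custom(object):
    def __reduce__(self):
        return (Custom, ())

# has_reduce(Custom()) -> (True, False): __reduce__ is found in the class
# dict while walking the MRO, but __reduce_ex__ only resolves to the
# inherited object.__reduce_ex__, which is deliberately not counted.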


def translate_module_name(module):
    """Rename builtin modules to a consistent module name.

    Prefer the more modern naming.

    This is used so that references to Python's `builtins` module can
    be loaded in both Python 2 and 3. We remap to the "__builtin__"
    name and unmap it when importing.

    Map the Python2 `exceptions` module to `builtins` because
    `builtins` is a superset and contains everything that is
    available in `exceptions`, which makes the translation simpler.

    See untranslate_module_name() for the reverse operation.
    """
    lookup = dict(__builtin__='builtins', exceptions='builtins')
    return lookup.get(module, module)


def untranslate_module_name(module):
    """Rename module names mentioned in JSON to names that we can import

    This reverses the translation applied by translate_module_name() to
    a module name available to the current version of Python.

    """
    module = _0_9_6_compat_untranslate(module)
    lookup = dict(builtins='__builtin__') if PY2 else {}
    return lookup.get(module, module)


def _0_9_6_compat_untranslate(module):
    """Provide compatibility for pickles created with jsonpickle 0.9.6 and
    earlier, remapping `exceptions` and `__builtin__` to `builtins`.
    """
    lookup = dict(__builtin__='builtins', exceptions='builtins')
    return lookup.get(module, module)
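
Round-tripping the two translations, assuming this module is importable as jsonpickle.util:

# Illustrative sketch only.
from jsonpickle import util

assert util.translate_module_name('__builtin__') == 'builtins'
assert util.translate_module_name('exceptions') == 'builtins'
# On Python 3 untranslate is the identity apart from the 0.9.6 remapping:
assert util.untranslate_module_name('builtins') == 'builtins'
assert util.untranslate_module_name('exceptions') == 'builtins'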


def importable_name(cls):
    """
    >>> class Example(object):
    ...     pass

    >>> ex = Example()
    >>> importable_name(ex.__class__) == 'jsonpickle.util.Example'
    True
    >>> importable_name(type(25)) == 'builtins.int'
    True
    >>> importable_name(None.__class__) == 'builtins.NoneType'
    True
    >>> importable_name(False.__class__) == 'builtins.bool'
    True
    >>> importable_name(AttributeError) == 'builtins.AttributeError'
    True

    """
    # Use the fully-qualified name if available (Python >= 3.3)
    name = getattr(cls, '__qualname__', cls.__name__)
    module = translate_module_name(cls.__module__)
    return '{}.{}'.format(module, name)


def b64encode(data):
    """
    Encode binary data to ascii text in base64. Data must be bytes.
    """
    return base64.b64encode(data).decode('ascii')


def b64decode(payload):
    """
    Decode payload - must be ascii text.
    """
    return base64.b64decode(payload)


def b85encode(data):
    """
    Encode binary data to ascii text in base85. Data must be bytes.
    """
    if PY2:
        raise NotImplementedError("Python 2 can't encode data in base85.")
    return base64.b85encode(data).decode('ascii')


def b85decode(payload):
    """
    Decode payload - must be ascii text.
    """
    if PY2:
        raise NotImplementedError("Python 2 can't decode base85-encoded data.")
    return base64.b85decode(payload)
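
A round-trip sketch of the four codecs (Python 3; the b85 pair raises NotImplementedError on Python 2):

# Illustrative sketch only.
from jsonpickle import util

blob = b'\x00\x01 binary payload'
text = util.b64encode(blob)            # bytes -> ascii str
assert util.b64decode(text) == blob

text85 = util.b85encode(blob)          # denser encoding, Python 3 only
assert util.b85decode(text85) == blob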


def itemgetter(obj, getter=operator.itemgetter(0)):
    return compat.ustr(getter(obj))


def items(obj):
    """Iterate over dicts in a deterministic order

    Python2 does not guarantee dict ordering, so this function
    papers over the difference in behavior. Python3 does guarantee
    dict order, without use of OrderedDict, so no sorting is needed there.

    """
    if PY3_ORDERED_DICT:
        for k, v in obj.items():
            yield k, v
    else:
        for k, v in sorted(obj.items(), key=itemgetter):
            yield k, v

matteo_env/Lib/site-packages/jsonpickle/version.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import sys

try:
    if sys.version_info < (3, 8):
        import importlib_metadata as metadata
    else:
        from importlib import metadata
except (ImportError, OSError):
    metadata = None


def _get_version():
    default_version = '0.0.0-alpha'
    try:
        version = metadata.version('jsonpickle')
    except (AttributeError, ImportError, OSError):
        version = default_version
    return version


__version__ = _get_version()
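
A quick way to see which branch _get_version took (sketch; the result depends on whether jsonpickle distribution metadata is visible on the path):

# python -c "import jsonpickle; print(jsonpickle.__version__)"
# prints the installed distribution's version, or falls back to
# '0.0.0-alpha' when metadata is None or the lookup raises.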

@@ -108,6 +108,11 @@ async def on_message(msg):
            except:
                await msg.channel.send("We can't find your idol. Looked everywhere, too.")

        elif command.startswith("showplayer "):
            player_name = json.loads(ono.get_stats(command.split(" ",1)[1]))
            await msg.channel.send(embed=build_star_embed(player_name))


        elif command == "startgame" and msg.author.id in config()["owners"]:
            game_task = asyncio.create_task(watch_game(msg.channel))
@@ -118,11 +123,23 @@ async def on_message(msg):
                if game[0].name == msg.author.name:
                    await msg.channel.send("There's already an active game with that name.")
                    return

            game_task = asyncio.create_task(setup_game(msg.channel, msg.author, games.game(msg.author.name, games.team(), games.team())))
            try:
                inningmax = int(command.split("setupgame ")[1])
            except:
                inningmax = 3
            game_task = asyncio.create_task(setup_game(msg.channel, msg.author, games.game(msg.author.name, games.team(), games.team(), length=inningmax)))
            await game_task

        elif command.startswith("saveteam\n") and msg.author.id in config()["owners"]:
            save_task = asyncio.create_task(save_team_batch(msg, command))
            await save_task

        elif command.startswith("showteam "):
            team = games.get_team(command.split(" ",1)[1])
            if team is not None:
                await msg.channel.send(embed=build_team_embed(team))
            else:
                await msg.channel.send("Can't find that team, boss. Typo?")

        elif command == "credit":
            await msg.channel.send("Our avatar was graciously provided to us, with permission, by @HetreaSky on Twitter.")
@@ -276,6 +293,8 @@ async def watch_game(channel, game):
    first_base = discord.utils.get(client.emojis, id = 790899850320543745)
    second_base = discord.utils.get(client.emojis, id = 790900139656740865)
    third_base = discord.utils.get(client.emojis, id = 790900156597403658)
    out_emoji = discord.utils.get(client.emojis, id = 791578957241778226)
    in_emoji = discord.utils.get(client.emojis, id = 791578957244792832)

    newgame = game
    embed = await channel.send("Play ball!")
@@ -301,7 +320,7 @@ async def watch_game(channel, game):
            new_embed.add_field(name="Inning:", value=f"🔼 {newgame.inning}", inline=True)
        else:
            new_embed.add_field(name="Inning:", value=f"🔽 {newgame.inning}", inline=True)
        new_embed.add_field(name="Outs:", value=newgame.outs, inline=True)
        new_embed.add_field(name="Outs:", value=f"{str(out_emoji)*newgame.outs+str(in_emoji)*(2-newgame.outs)}", inline=True)
        new_embed.add_field(name="Pitcher:", value=newgame.get_pitcher(), inline=False)
        new_embed.add_field(name="Batter:", value=newgame.get_batter(), inline=False)
@@ -351,8 +370,16 @@ async def watch_game(channel, game):
    gamesarray.pop(gamesarray.index((newgame,use_emoji_names))) #cleanup is important!
    del newgame

def build_team_embed(team):
    embed = discord.Embed(color=discord.Color.purple(), title=team.name)
    lineup_string = ""
    for player in team.lineup:
        lineup_string += f"{player.name} {player.star_string('batting_stars')}\n"

    embed.add_field(name="Pitcher:", value=f"{team.pitcher.name} {team.pitcher.star_string('pitching_stars')}.", inline = False)
    embed.add_field(name="Lineup:", value=lineup_string, inline = False)
    embed.set_footer(text=team.slogan)
    return embed

def build_star_embed(player_json):
    starkeys = {"batting_stars" : "Batting", "pitching_stars" : "Pitching", "baserunning_stars" : "Baserunning", "defense_stars" : "Defense"}
@@ -373,5 +400,44 @@ def build_star_embed(player_json):
    return embed


async def save_team_batch(message, command):
    newteam = games.team()
    #try:
    roster = command.split("\n",1)[1].split("\n")
    newteam.name = roster[0] #first line is team name
    newteam.slogan = roster[1] #second line is slogan
    for rosternum in range(2,len(roster)-1):
        if roster[rosternum] != "":
            newteam.add_lineup(games.player(ono.get_stats(roster[rosternum])))
    newteam.set_pitcher(games.player(ono.get_stats(roster[len(roster)-1]))) #last line is pitcher name

    if len(newteam.name) > 30:
        await message.channel.send("Team names have to be less than 30 characters! Try again.")
        return
    elif len(newteam.slogan) > 100:
        await message.channel.send("We've given you 100 characters for the slogan. Discord puts limits on us and thus, we put limits on you. C'est la vie.")
        return

    await message.channel.send(embed=build_team_embed(newteam))
    checkmsg = await message.channel.send("Does this look good to you, boss?")
    await checkmsg.add_reaction("👍")
    await checkmsg.add_reaction("👎")

    def react_check(react, user):
        return user == message.author and react.message == checkmsg

    try:
        react, user = await client.wait_for('reaction_add', timeout=20.0, check=react_check)
        if react.emoji == "👍":
            await message.channel.send("You got it, chief. Saving now.")
            games.save_team(newteam)
            await message.channel.send("Saved! Thank you for flying Air Matteo. We hope you had a pleasant data entry.")
            return
        elif react.emoji == "👎":
            await message.channel.send("Message received. Pumping brakes, turning this car around. Try again, chief.")
            return
    except asyncio.TimeoutError:
        await message.channel.send("Look, I don't have all day. 20 seconds is long enough, right? Try again.")
        return
    #except:
        #await message.channel.send("uh.")

client.run(config()["token"])