
deleted flask api restful + error handling

ash committed 2 years ago (commit fcefd05b71)
36 changed files with 3215 additions and 97 deletions
  1. Backend/Sources/View/view_error_management.py (+23 -0)
  2. Backend/Sources/View/view_manager.py (+60 -89)
  3. Backend/Sources/utility/privilege_manager.py (+15 -8)
  4. Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/INSTALLER (+1 -0)
  5. Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/LICENSE (+31 -0)
  6. Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/METADATA (+34 -0)
  7. Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/RECORD (+11 -0)
  8. Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/REQUESTED (+0 -0)
  9. Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/WHEEL (+6 -0)
  10. Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/top_level.txt (+1 -0)
  11. Backend/venv/Lib/site-packages/blinker-1.4-py3.9.egg-info/PKG-INFO (+105 -0)
  12. Backend/venv/Lib/site-packages/blinker-1.4-py3.9.egg-info/SOURCES.txt (+49 -0)
  13. Backend/venv/Lib/site-packages/blinker-1.4-py3.9.egg-info/dependency_links.txt (+1 -0)
  14. Backend/venv/Lib/site-packages/blinker-1.4-py3.9.egg-info/installed-files.txt (+12 -0)
  15. Backend/venv/Lib/site-packages/blinker-1.4-py3.9.egg-info/top_level.txt (+1 -0)
  16. Backend/venv/Lib/site-packages/blinker/__init__.py (+22 -0)
  17. Backend/venv/Lib/site-packages/blinker/_saferef.py (+234 -0)
  18. Backend/venv/Lib/site-packages/blinker/_utilities.py (+163 -0)
  19. Backend/venv/Lib/site-packages/blinker/base.py (+455 -0)
  20. Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/INSTALLER (+1 -0)
  21. Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/LICENSE.rst (+28 -0)
  22. Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/METADATA (+67 -0)
  23. Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/RECORD (+23 -0)
  24. Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/WHEEL (+5 -0)
  25. Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/top_level.txt (+1 -0)
  26. Backend/venv/Lib/site-packages/cachelib/__init__.py (+18 -0)
  27. Backend/venv/Lib/site-packages/cachelib/base.py (+185 -0)
  28. Backend/venv/Lib/site-packages/cachelib/file.py (+319 -0)
  29. Backend/venv/Lib/site-packages/cachelib/memcached.py (+197 -0)
  30. Backend/venv/Lib/site-packages/cachelib/py.typed (+0 -0)
  31. Backend/venv/Lib/site-packages/cachelib/redis.py (+160 -0)
  32. Backend/venv/Lib/site-packages/cachelib/serializers.py (+105 -0)
  33. Backend/venv/Lib/site-packages/cachelib/simple.py (+104 -0)
  34. Backend/venv/Lib/site-packages/cachelib/uwsgi.py (+83 -0)
  35. Backend/venv/Lib/site-packages/flask_session/__init__.py (+109 -0)
  36. Backend/venv/Lib/site-packages/flask_session/sessions.py (+586 -0)

+ 23 - 0
Backend/Sources/View/view_error_management.py

@@ -0,0 +1,23 @@
+import View.view_privilege as privileges
+import logging
+from flask import jsonify
+from utility.app_logging import logger_name
+logger = logging.getLogger(logger_name + ".VIEW")
+
+import traceback
+def define_error_management(app) :
+    @app.errorhandler(500)
+    def handle_error(err) :
+        """Return JSON instead of HTML for any other server error"""
+        logger.error(f"Unknown Exception: {str(err)}")
+        logger.debug(''.join(traceback.format_exception(etype=type(err), value=err, tb=err.__traceback__)))
+        response = {"error": str(err) }
+        return jsonify(response), 500
+
+    @app.errorhandler(privileges.APIAuthError)
+    def handle_auth_error(err) :
+        """Return JSON instead of HTML for authentication errors"""
+        logger.error(f"Authentication Error: {str(err)}")
+        logger.debug(''.join(traceback.format_exception(etype=type(err), value=err, tb=err.__traceback__)))
+        response = {"error": str(err) }
+        return jsonify(response), err.code
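
A note on the pattern above: registering one handler for HTTP 500 and one per custom exception class lets every route return JSON errors without try/except blocks inside the views. A minimal standalone sketch, with a hypothetical DemoError standing in for privileges.APIAuthError and a throwaway route for illustration:

    from flask import Flask, jsonify

    app = Flask(__name__)

    class DemoError(Exception):
        # Hypothetical stand-in for privileges.APIAuthError
        code = 403
        description = "Authentication Error"

    @app.errorhandler(500)
    def handle_server_error(err):
        # Any unhandled exception ends up here once Flask wraps it in a 500.
        return jsonify({"error": str(err)}), 500

    @app.errorhandler(DemoError)
    def handle_demo_error(err):
        # Custom exception classes get their own handler and status code.
        return jsonify({"error": str(err)}), err.code

    @app.route("/boom")
    def boom():
        raise DemoError("Access Denied")  # client gets {"error": "Access Denied"} with HTTP 403

Flask dispatches to the most specific handler registered for the raised exception class, so views can simply raise.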

+ 60 - 89
Backend/Sources/View/view_manager.py

@@ -1,10 +1,14 @@
 from array import array
 from msilib.schema import Class
+from re import I
 import threading
 from tkinter.ttk import Separator
+from typing import Type
 from xmlrpc.client import boolean
 from flask import session,Flask,request, jsonify
 from flask_restful import Resource, Api
+import flask_restful
+from itsdangerous import json
 from sqlalchemy import delete
 from utility.app_logging import logger_name
 import logging
@@ -14,18 +18,28 @@ import Model.model_manager as model_manager
 from werkzeug.security import check_password_hash
 import anytree
 import View.view_privilege as privileges
+from datetime import timedelta
+from flask_session import Session
+import View.view_error_management as view_error_management
+logger = logging.getLogger(logger_name + ".VIEW")
+
+

 __app__ = Flask("OpenIsp")
-__api__ = Api(__app__)
 __app__.secret_key = "aseqzdwxc"
+__app__.permanent_session_lifetime = timedelta(minutes=2)
+__app__.config["SESSION_PERMANENT"] = False
+__app__.config["SESSION_TYPE"] = "filesystem"


 __resource_array__ : array

-logger = logging.getLogger(logger_name + ".VIEW")
+
 __id_counter__ : int = 1


+
+
 from werkzeug.serving import make_server
 class ServerThread(threading.Thread):

@@ -44,33 +58,12 @@ class ServerThread(threading.Thread):

 __server_process__ : ServerThread

-
-session_counter_dict = dict()
-
-def add_connection_counter(Item : isp_model.user_account) :
-    if Item.nickname in session_counter_dict :
-        if session_counter_dict[(Item.nickname)] < Item.max_simultaneous_conn :
-            session_counter_dict[(Item.nickname)] = session_counter_dict[Item.nickname] + 1
-
-        else :
-            logger.warning("max simultaneous number of connections for account '" + Item.nickname + "' reached")
-            raise BaseException("max simultaneous number of connections for this account reached.")
-    else :
-        session_counter_dict[Item.nickname] = 1
-
-def delete_connection_counter(Item : isp_model.user_account) :
-    if Item.nickname in session_counter_dict :
-        if session_counter_dict[(Item.nickname)] > 0 :
-            session_counter_dict[(Item.nickname)] = session_counter_dict[(Item.nickname)] - 1
-        else :
-            session_counter_dict.pop(Item.nickname)
-    else :
-        raise BaseException("Cannot loggout already logout account")
+def get_roles_ids() :
+    return session["roles_ids"]


 def init() :

-
     @__app__.before_request
     def before_request_func():
         print(request.headers.__dict__)
@@ -92,87 +85,63 @@ def init() :
             logger.debug("request from  " + session["username"])


-    class login(Resource):
-        def post(self):
-            _json = request.json
-            _username = _json['username']
-            _password = _json['password']
-
-
-            with persistence.get_Session_Instance() as sess :
-
-                Item = sess.query(isp_model.user_account).filter(isp_model.user_account.nickname == _username).first()
-                if not isinstance(Item,isp_model.user_account) :
-                    logger.warning("user tried to login with unknown account name : " + _username)
-                    resp = jsonify({'message' : 'Bad Request - user account not found'})
-                    resp.status_code = 400
-                    return resp
-
-                print(Item.password)
-                print(_password)
-
-                if not check_password_hash(Item.password,_password) :
-                    logger.warning("user with account name '" + _username + "' tried to login with invalid password")
-                    resp = jsonify({'message' : 'Bad Request - invalid password for this account'})
-                    resp.status_code = 400
-                    return resp
-
-                add_connection_counter(Item)
-
-                session["username"] = _username
-                session["account_data"] = model_manager.ModelObjectToJsonString(Item)
-                session["roles_ids"] = [privileges.inventory_read_only_role.id]
-                logger.info("account " + _username + " logged IN successfully with id : " + session["client_id"])



+    from flask.logging import default_handler
+    __app__.logger.removeHandler(default_handler)

-    class logout(Resource):
-        def post(self):
-
-            Item = isp_model.user_account(nickname = "dummy")
-            model_manager.setModelItemAttributesFromJson(Item,session["account_data"])
-
-
-            delete
-
-
-            logger.info("account " + session["username"] + " logged OUT with id : " + session["client_id"])
-            session.pop("username")
-            session.pop("account_data")
-            session.pop("client_id")
-
-
-    def get_roles_ids() :
-        return session["roles_ids"]
-
-    class get_routes(Resource):
-
-        @privileges.manager.require_authorization(privileges.inventory_admin_role,get_roles_ids)
-        def get(self):
-            routes = []
-            for route in __app__.url_map.iter_rules():
-                routes.append('%s' % route)
-            return routes

+    privileges.init()
+    view_error_management.define_error_management(__app__)

-    __api__.add_resource(get_routes, '/routes')
-    __api__.add_resource(login,  '/api/login')
-    __api__.add_resource(logout, '/api/logout')
+@__app__.route('/api/login',methods = ['POST'])
+def login():
+    _json = request.json
+    _username = _json['username']
+    _password = _json['password']


-    from flask.logging import default_handler
-    __app__.logger.removeHandler(default_handler)
+    with persistence.get_Session_Instance() as sess :

+        Item = sess.query(isp_model.user_account).filter(isp_model.user_account.nickname == _username).first()
+        if not isinstance(Item,isp_model.user_account) :
+            logger.warning("user tried to login with unknown account name : " + _username)
+            resp = jsonify({'message' : 'Bad Request - user account not found'})
+            resp.status_code = 400
+            return resp

+        if not check_password_hash(Item.password,_password) :
+            logger.warning("user with account name '" + _username + "' tried to login with invalid password")
+            resp = jsonify({'message' : 'Bad Request - invalid password for this account'})
+            resp.status_code = 400
+            return resp


-    privileges.init()

+        session["username"] = _username
+        session["account_data"] = model_manager.ModelObjectToJsonString(Item)
+        session["roles_ids"] = [privileges.inventory_read_only_role.id]
+        logger.info("account " + _username + " logged IN successfully with id : " + session["client_id"])
+        resp = jsonify({'message' : 'login successful'})
+        resp.status_code = 200
+        return resp


+@__app__.route('/api/logout',methods = ['POST'])
+def logout():
+    logger.info("account " + session["username"] + " logged OUT with id : " + session["client_id"])
+    session.clear()
+    return jsonify('logout')


+@__app__.route('/routes',methods = ['GET'])
+@privileges.manager.require_authorization(required_role=privileges.inventory_admin_role,ids_getter=get_roles_ids)
+def routes():
+    routes = []
+    for route in __app__.url_map.iter_rules():
+        routes.append('%s' % route)
+    return jsonify(routes)

 def run() :
     global __server_process__
@@ -180,6 +149,8 @@ def run() :
     __server_process__.start()
     logger.info('View server started')

+
+
 def stop() :
     global __server_process__
     __server_process__.shutdown()
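
The diff above is the core of the commit: the flask_restful Resource classes (login, logout, get_routes) are replaced by plain Flask view functions. As a rough sketch of the two registration styles being swapped, assuming both libraries are installed (route paths and names here are illustrative, not the project's):

    from flask import Flask, jsonify
    from flask_restful import Api, Resource  # the style being removed

    app = Flask(__name__)
    api = Api(app)

    # flask_restful style: one class per endpoint, one method per HTTP verb.
    class Login(Resource):
        def post(self):
            return {"message": "login successful"}  # dicts are auto-serialized to JSON

    api.add_resource(Login, "/api/login")

    # Plain Flask style, which the commit moves to: a decorated function per route.
    @app.route("/api/login-plain", methods=["POST"])
    def login_plain():
        return jsonify({"message": "login successful"})

Functionally the two are close; the plain style drops a dependency and keeps session handling, decorators such as require_authorization, and the new error handlers in ordinary Flask idiom.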

+ 15 - 8
Backend/Sources/utility/privilege_manager.py

@@ -9,11 +9,17 @@ def __generate_id__() :

     return ret

-
+class APIAuthError(Exception):
+  code = 403
+  description = "Authentication Error"


 import logging
-from app_logging import logger_name
+if __name__ == "__main__" :
+    from app_logging import logger_name
+else :
+    from utility.app_logging import logger_name
+
 logger = logging.getLogger(logger_name + ".PRIVILEGE")

 class Privilege_Role :
@@ -242,17 +248,18 @@ class Privilege_Manager :
             def wrap(*args, **kwargs):
                 if not self.is_role_registered(required_role) :
                     raise BaseException("role is not registered everywhere")
-                print(ids_getter())
+                logger.debug("checking authorization with roles for function " + f.__name__ + ": " )
+                logger.debug("required role :  " + required_role.name )
                 for role in self.get_roles_by_ids(ids_getter()) : #an user can have multiple roles and each role can include other roles.
+                    logger.debug("checking role : " + role.name)
                     for aggregated_role in role.aggregated_roles :
-                        print (aggregated_role.name)
+
                         if aggregated_role == required_role :
                             return f(*args, **kwargs)

-                @functools.wraps(f)
                 def raiser(*args, **kwargs):
-                    raise BaseException("access denied")
-
+                    logger.warning("access denied for function :" +  f.__name__)
+                    raise APIAuthError("Access Denied")
                 return raiser(*args, **kwargs)

             return wrap
@@ -326,7 +333,7 @@ def test() :

     try :
         hello_admin(23,45)
-    except BaseException as Ex:
+    except Exception as Ex:
         print(str(Ex))



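The require_authorization decorator above wraps a view, checks the caller's roles via the injected ids_getter, and raises APIAuthError on failure, which the new 403 handler then turns into a JSON response. A self-contained sketch of that decorator-factory pattern, simplified to plain string roles instead of the project's role objects:

    import functools

    class APIAuthError(Exception):
        code = 403
        description = "Authentication Error"

    def require_authorization(required_role, ids_getter):
        """Allow the call only if ids_getter() yields required_role."""
        def decorator(f):
            @functools.wraps(f)  # preserves f's name for introspection and routing
            def wrap(*args, **kwargs):
                if required_role in ids_getter():
                    return f(*args, **kwargs)
                raise APIAuthError("Access Denied")
            return wrap
        return decorator

    current_roles = ["inventory_read_only"]

    @require_authorization("inventory_admin", lambda: current_roles)
    def delete_everything():
        return "done"

    try:
        delete_everything()
    except APIAuthError as ex:
        print(ex)  # Access Denied

Note that the commit drops @functools.wraps from the inner function; keeping it on the outer wrapper matters when decorating Flask views, since Flask derives endpoint names from the function name and duplicate names collide.
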
+ 1 - 0
Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 31 - 0
Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/LICENSE

@@ -0,0 +1,31 @@
+Copyright (c) 2014 by Shipeng Feng.
+
+Some rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the following
+  disclaimer in the documentation and/or other materials provided
+  with the distribution.
+
+* The names of the contributors may not be used to endorse or
+  promote products derived from this software without specific
+  prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+ 34 - 0
Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/METADATA

@@ -0,0 +1,34 @@
+Metadata-Version: 2.1
+Name: Flask-Session
+Version: 0.4.0
+Summary: Adds server-side session support to your Flask application
+Home-page: https://github.com/fengsp/flask-session
+Author: Shipeng Feng
+Author-email: fsp261@gmail.com
+License: BSD
+Platform: any
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Dist: Flask (>=0.8)
+Requires-Dist: cachelib
+
+
+Flask-Session
+-------------
+
+Flask-Session is an extension for Flask that adds support for
+Server-side Session to your application.
+
+Links
+`````
+
+* `development version
+  <https://github.com/fengsp/flask-session/zipball/master#egg=Flask-dev>`_
+
+
+
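
Flask-Session is the piece the commit wires into view_manager.py. A minimal sketch of the typical setup, matching the SESSION_TYPE = "filesystem" configuration used above (app name and route are illustrative):

    from datetime import timedelta

    from flask import Flask, session
    from flask_session import Session

    app = Flask(__name__)
    app.secret_key = "change-me"  # kept in the app config, as in view_manager.py above
    app.permanent_session_lifetime = timedelta(minutes=2)
    app.config["SESSION_PERMANENT"] = False
    app.config["SESSION_TYPE"] = "filesystem"  # persist session payloads on disk
    Session(app)  # swaps in the server-side session interface

    @app.route("/whoami")
    def whoami():
        # session is used exactly as with the default client-side sessions,
        # but the data now lives server-side; the cookie carries only an id.
        return session.get("username", "anonymous")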

+ 11 - 0
Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/RECORD

@@ -0,0 +1,11 @@
+Flask_Session-0.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Flask_Session-0.4.0.dist-info/LICENSE,sha256=S3lNnKCO6cV706SpiqaHVtNMshfWXZAIhYnZx-1W4q4,1455
+Flask_Session-0.4.0.dist-info/METADATA,sha256=z5fKBiEzqMGBSuOVkPmc7Dkk-XbA7BJLmU6nDLrnw3Q,924
+Flask_Session-0.4.0.dist-info/RECORD,,
+Flask_Session-0.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+Flask_Session-0.4.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
+Flask_Session-0.4.0.dist-info/top_level.txt,sha256=NLMy-fPmNVJe6dlgHK_74-fLp-pQl_X60Gi06-miwdk,14
+flask_session/__init__.py,sha256=p_uu-alHjb7wP651oI63IrEOHJb3JtWEwTGz1QS3lVA,4223
+flask_session/__pycache__/__init__.cpython-39.pyc,,
+flask_session/__pycache__/sessions.cpython-39.pyc,,
+flask_session/sessions.py,sha256=cNYNqDhLIb6CmqDzhwgJ_Y2fx02tDMsfkM7m1F6aeyk,22431

+ 0 - 0
Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/REQUESTED


+ 6 - 0
Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/WHEEL

@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+

+ 1 - 0
Backend/venv/Lib/site-packages/Flask_Session-0.4.0.dist-info/top_level.txt

@@ -0,0 +1 @@
+flask_session

+ 105 - 0
Backend/venv/Lib/site-packages/blinker-1.4-py3.9.egg-info/PKG-INFO

@@ -0,0 +1,105 @@
+Metadata-Version: 2.1
+Name: blinker
+Version: 1.4
+Summary: Fast, simple object-to-object and broadcast signaling
+Home-page: http://pythonhosted.org/blinker/
+Author: Jason Kirtland
+Author-email: jek@discorporate.us
+License: MIT License
+Keywords: signal emit events broadcast
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.0
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+License-File: LICENSE
+License-File: AUTHORS
+
+[![Build Status](https://travis-ci.org/jek/blinker.svg?branch=master)](https://travis-ci.org/jek/blinker)
+
+
+# Blinker
+
+Blinker provides a fast dispatching system that allows any number of
+interested parties to subscribe to events, or "signals".
+
+Signal receivers can subscribe to specific senders or receive signals
+sent by any sender.
+
+    >>> from blinker import signal
+    >>> started = signal('round-started')
+    >>> def each(round):
+    ...     print "Round %s!" % round
+    ...
+    >>> started.connect(each)
+    
+    >>> def round_two(round):
+    ...     print "This is round two."
+    ...
+    >>> started.connect(round_two, sender=2)
+  
+    >>> for round in range(1, 4):
+    ...     started.send(round)
+    ...
+    Round 1!
+    Round 2!
+    This is round two.
+    Round 3!
+
+See the [Blinker documentation](https://pythonhosted.org/blinker/) for more information.
+
+## Requirements
+
+Blinker requires Python 2.4 or higher, Python 3.0 or higher, or Jython 2.5 or higher.
+
+## Changelog Summary
+
+1.3 (July 3, 2013)
+
+ - The global signal stash behind blinker.signal() is now backed by a
+   regular name-to-Signal dictionary. Previously, weak references were
+   held in the mapping and ephemeral usage in code like
+   ``signal('foo').connect(...)`` could have surprising program behavior
+   depending on import order of modules.
+ - blinker.Namespace is now built on a regular dict. Use
+   blinker.WeakNamespace for the older, weak-referencing behavior.
+ - Signal.connect('text-sender') uses an alternate hashing strategy to
+   avoid sharp edges in text identity.
+
+1.2 (October 26, 2011)
+
+ - Added Signal.receiver_connected and Signal.receiver_disconnected
+   per-Signal signals.
+ - Deprecated the global 'receiver_connected' signal.
+ - Verified Python 3.2 support (no changes needed!)
+
+1.1 (July 21, 2010)
+
+ - Added ``@signal.connect_via(sender)`` decorator
+ - Added ``signal.connected_to`` shorthand name for the
+   ``temporarily_connected_to`` context manager.
+
+1.0 (March 28, 2010)
+
+ - Python 3.x compatibility
+
+0.9 (February 26, 2010)
+
+ - Sphinx docs, project website
+ - Added ``with a_signal.temporarily_connected_to(receiver): ...`` support
+
+
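
The README quoted in this PKG-INFO still uses Python 2 print statements; under the Python 3.9 interpreter this venv targets, the same demo reads:

    from blinker import signal

    started = signal('round-started')      # named signals are singletons

    def each(round):
        print("Round %s!" % round)

    def round_two(round):
        print("This is round two.")

    started.connect(each)                  # receives from any sender
    started.connect(round_two, sender=2)   # receives only when the sender is 2

    for round in range(1, 4):
        started.send(round)
    # Round 1! / Round 2! / This is round two. / Round 3!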

+ 49 - 0
Backend/venv/Lib/site-packages/blinker-1.4-py3.9.egg-info/SOURCES.txt

@@ -0,0 +1,49 @@
+AUTHORS
+CHANGES
+LICENSE
+MANIFEST.in
+README.md
+setup.cfg
+setup.py
+blinker/__init__.py
+blinker/_saferef.py
+blinker/_utilities.py
+blinker/base.py
+blinker.egg-info/PKG-INFO
+blinker.egg-info/SOURCES.txt
+blinker.egg-info/dependency_links.txt
+blinker.egg-info/top_level.txt
+docs/html/genindex.html
+docs/html/index.html
+docs/html/objects.inv
+docs/html/search.html
+docs/html/searchindex.js
+docs/html/_sources/index.txt
+docs/html/_static/basic.css
+docs/html/_static/blinker-named.png
+docs/html/_static/blinker64.png
+docs/html/_static/comment-bright.png
+docs/html/_static/comment-close.png
+docs/html/_static/comment.png
+docs/html/_static/doctools.js
+docs/html/_static/down-pressed.png
+docs/html/_static/down.png
+docs/html/_static/file.png
+docs/html/_static/flasky.css
+docs/html/_static/jquery.js
+docs/html/_static/minus.png
+docs/html/_static/plus.png
+docs/html/_static/pygments.css
+docs/html/_static/searchtools.js
+docs/html/_static/underscore.js
+docs/html/_static/up-pressed.png
+docs/html/_static/up.png
+docs/html/_static/websupport.js
+docs/source/conf.py
+docs/source/index.rst
+docs/source/_themes/flask_theme_support.py
+docs/text/index.txt
+tests/test_context.py
+tests/test_saferef.py
+tests/test_signals.py
+tests/test_utilities.py

+ 1 - 0
Backend/venv/Lib/site-packages/blinker-1.4-py3.9.egg-info/dependency_links.txt

@@ -0,0 +1 @@
+

+ 12 - 0
Backend/venv/Lib/site-packages/blinker-1.4-py3.9.egg-info/installed-files.txt

@@ -0,0 +1,12 @@
+..\blinker\__init__.py
+..\blinker\__pycache__\__init__.cpython-39.pyc
+..\blinker\__pycache__\_saferef.cpython-39.pyc
+..\blinker\__pycache__\_utilities.cpython-39.pyc
+..\blinker\__pycache__\base.cpython-39.pyc
+..\blinker\_saferef.py
+..\blinker\_utilities.py
+..\blinker\base.py
+PKG-INFO
+SOURCES.txt
+dependency_links.txt
+top_level.txt

+ 1 - 0
Backend/venv/Lib/site-packages/blinker-1.4-py3.9.egg-info/top_level.txt

@@ -0,0 +1 @@
+blinker

+ 22 - 0
Backend/venv/Lib/site-packages/blinker/__init__.py

@@ -0,0 +1,22 @@
+from blinker.base import (
+    ANY,
+    NamedSignal,
+    Namespace,
+    Signal,
+    WeakNamespace,
+    receiver_connected,
+    signal,
+)
+
+__all__ = [
+    'ANY',
+    'NamedSignal',
+    'Namespace',
+    'Signal',
+    'WeakNamespace',
+    'receiver_connected',
+    'signal',
+    ]
+
+
+__version__ = '1.4'

+ 234 - 0
Backend/venv/Lib/site-packages/blinker/_saferef.py

@@ -0,0 +1,234 @@
+# extracted from Louie, http://pylouie.org/
+# updated for Python 3
+#
+# Copyright (c) 2006 Patrick K. O'Brien, Mike C. Fletcher,
+#                    Matthew R. Scott
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+#       notice, this list of conditions and the following disclaimer.
+#
+#     * Redistributions in binary form must reproduce the above
+#       copyright notice, this list of conditions and the following
+#       disclaimer in the documentation and/or other materials provided
+#       with the distribution.
+#
+#     * Neither the name of the <ORGANIZATION> nor the names of its
+#       contributors may be used to endorse or promote products derived
+#       from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+"""Refactored 'safe reference from dispatcher.py"""
+
+import operator
+import sys
+import traceback
+import weakref
+
+
+try:
+    callable
+except NameError:
+    def callable(object):
+        return hasattr(object, '__call__')
+
+
+if sys.version_info < (3,):
+    get_self = operator.attrgetter('im_self')
+    get_func = operator.attrgetter('im_func')
+else:
+    get_self = operator.attrgetter('__self__')
+    get_func = operator.attrgetter('__func__')
+
+
+def safe_ref(target, on_delete=None):
+    """Return a *safe* weak reference to a callable target.
+
+    - ``target``: The object to be weakly referenced, if it's a bound
+      method reference, will create a BoundMethodWeakref, otherwise
+      creates a simple weakref.
+
+    - ``on_delete``: If provided, will have a hard reference stored to
+      the callable to be called after the safe reference goes out of
+      scope with the reference object, (either a weakref or a
+      BoundMethodWeakref) as argument.
+    """
+    try:
+        im_self = get_self(target)
+    except AttributeError:
+        if callable(on_delete):
+            return weakref.ref(target, on_delete)
+        else:
+            return weakref.ref(target)
+    else:
+        if im_self is not None:
+            # Turn a bound method into a BoundMethodWeakref instance.
+            # Keep track of these instances for lookup by disconnect().
+            assert hasattr(target, 'im_func') or hasattr(target, '__func__'), (
+                "safe_ref target %r has im_self, but no im_func, "
+                "don't know how to create reference" % target)
+            reference = BoundMethodWeakref(target=target, on_delete=on_delete)
+            return reference
+
+
+class BoundMethodWeakref(object):
+    """'Safe' and reusable weak references to instance methods.
+
+    BoundMethodWeakref objects provide a mechanism for referencing a
+    bound method without requiring that the method object itself
+    (which is normally a transient object) is kept alive.  Instead,
+    the BoundMethodWeakref object keeps weak references to both the
+    object and the function which together define the instance method.
+
+    Attributes:
+
+    - ``key``: The identity key for the reference, calculated by the
+      class's calculate_key method applied to the target instance method.
+
+    - ``deletion_methods``: Sequence of callable objects taking single
+      argument, a reference to this object which will be called when
+      *either* the target object or target function is garbage
+      collected (i.e. when this object becomes invalid).  These are
+      specified as the on_delete parameters of safe_ref calls.
+
+    - ``weak_self``: Weak reference to the target object.
+
+    - ``weak_func``: Weak reference to the target function.
+
+    Class Attributes:
+
+    - ``_all_instances``: Class attribute pointing to all live
+      BoundMethodWeakref objects indexed by the class's
+      calculate_key(target) method applied to the target objects.
+      This weak value dictionary is used to short-circuit creation so
+      that multiple references to the same (object, function) pair
+      produce the same BoundMethodWeakref instance.
+    """
+
+    _all_instances = weakref.WeakValueDictionary()
+
+    def __new__(cls, target, on_delete=None, *arguments, **named):
+        """Create new instance or return current instance.
+
+        Basically this method of construction allows us to
+        short-circuit creation of references to already- referenced
+        instance methods.  The key corresponding to the target is
+        calculated, and if there is already an existing reference,
+        that is returned, with its deletion_methods attribute updated.
+        Otherwise the new instance is created and registered in the
+        table of already-referenced methods.
+        """
+        key = cls.calculate_key(target)
+        current = cls._all_instances.get(key)
+        if current is not None:
+            current.deletion_methods.append(on_delete)
+            return current
+        else:
+            base = super(BoundMethodWeakref, cls).__new__(cls)
+            cls._all_instances[key] = base
+            base.__init__(target, on_delete, *arguments, **named)
+            return base
+
+    def __init__(self, target, on_delete=None):
+        """Return a weak-reference-like instance for a bound method.
+
+        - ``target``: The instance-method target for the weak reference,
+          must have im_self and im_func attributes and be
+          reconstructable via the following, which is true of built-in
+          instance methods::
+
+            target.im_func.__get__( target.im_self )
+
+        - ``on_delete``: Optional callback which will be called when
+          this weak reference ceases to be valid (i.e. either the
+          object or the function is garbage collected).  Should take a
+          single argument, which will be passed a pointer to this
+          object.
+        """
+        def remove(weak, self=self):
+            """Set self.isDead to True when method or instance is destroyed."""
+            methods = self.deletion_methods[:]
+            del self.deletion_methods[:]
+            try:
+                del self.__class__._all_instances[self.key]
+            except KeyError:
+                pass
+            for function in methods:
+                try:
+                    if callable(function):
+                        function(self)
+                except Exception:
+                    try:
+                        traceback.print_exc()
+                    except AttributeError:
+                        e = sys.exc_info()[1]
+                        print ('Exception during saferef %s '
+                               'cleanup function %s: %s' % (self, function, e))
+        self.deletion_methods = [on_delete]
+        self.key = self.calculate_key(target)
+        im_self = get_self(target)
+        im_func = get_func(target)
+        self.weak_self = weakref.ref(im_self, remove)
+        self.weak_func = weakref.ref(im_func, remove)
+        self.self_name = str(im_self)
+        self.func_name = str(im_func.__name__)
+
+    def calculate_key(cls, target):
+        """Calculate the reference key for this reference.
+
+        Currently this is a two-tuple of the id()'s of the target
+        object and the target function respectively.
+        """
+        return (id(get_self(target)), id(get_func(target)))
+    calculate_key = classmethod(calculate_key)
+
+    def __str__(self):
+        """Give a friendly representation of the object."""
+        return "%s(%s.%s)" % (
+            self.__class__.__name__,
+            self.self_name,
+            self.func_name,
+            )
+
+    __repr__ = __str__
+
+    def __nonzero__(self):
+        """Whether we are still a valid reference."""
+        return self() is not None
+
+    def __cmp__(self, other):
+        """Compare with another reference."""
+        if not isinstance(other, self.__class__):
+            return cmp(self.__class__, type(other))
+        return cmp(self.key, other.key)
+
+    def __call__(self):
+        """Return a strong reference to the bound method.
+
+        If the target cannot be retrieved, then will return None,
+        otherwise returns a bound instance method for our object and
+        function.
+
+        Note: You may call this method any number of times, as it does
+        not invalidate the reference.
+        """
+        target = self.weak_self()
+        if target is not None:
+            function = self.weak_func()
+            if function is not None:
+                return function.__get__(target)
+        return None
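
To see what safe_ref buys over a plain weakref, a short usage sketch (safe_ref is internal blinker API, so this is for illustration only):

    import gc

    from blinker._saferef import safe_ref

    class Greeter:
        def hello(self):
            return "hi"

    g = Greeter()
    # A plain weakref.ref(g.hello) would die immediately: the bound-method
    # object is transient. safe_ref returns a BoundMethodWeakref instead,
    # keyed on (id(instance), id(function)).
    ref = safe_ref(g.hello)
    print(ref()())            # prints "hi"; ref() rebuilds a live bound method

    del g
    gc.collect()
    print(ref() is None)      # True once the instance is gone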

+ 163 - 0
Backend/venv/Lib/site-packages/blinker/_utilities.py

@@ -0,0 +1,163 @@
+from weakref import ref
+
+from blinker._saferef import BoundMethodWeakref
+
+
+try:
+    callable
+except NameError:
+    def callable(object):
+        return hasattr(object, '__call__')
+
+
+try:
+    from collections import defaultdict
+except:
+    class defaultdict(dict):
+
+        def __init__(self, default_factory=None, *a, **kw):
+            if (default_factory is not None and
+                not hasattr(default_factory, '__call__')):
+                raise TypeError('first argument must be callable')
+            dict.__init__(self, *a, **kw)
+            self.default_factory = default_factory
+
+        def __getitem__(self, key):
+            try:
+                return dict.__getitem__(self, key)
+            except KeyError:
+                return self.__missing__(key)
+
+        def __missing__(self, key):
+            if self.default_factory is None:
+                raise KeyError(key)
+            self[key] = value = self.default_factory()
+            return value
+
+        def __reduce__(self):
+            if self.default_factory is None:
+                args = tuple()
+            else:
+                args = self.default_factory,
+            return type(self), args, None, None, self.items()
+
+        def copy(self):
+            return self.__copy__()
+
+        def __copy__(self):
+            return type(self)(self.default_factory, self)
+
+        def __deepcopy__(self, memo):
+            import copy
+            return type(self)(self.default_factory,
+                              copy.deepcopy(self.items()))
+
+        def __repr__(self):
+            return 'defaultdict(%s, %s)' % (self.default_factory,
+                                            dict.__repr__(self))
+
+
+try:
+    from contextlib import contextmanager
+except ImportError:
+    def contextmanager(fn):
+        def oops(*args, **kw):
+            raise RuntimeError("Python 2.5 or above is required to use "
+                               "context managers.")
+        oops.__name__ = fn.__name__
+        return oops
+
+class _symbol(object):
+
+    def __init__(self, name):
+        """Construct a new named symbol."""
+        self.__name__ = self.name = name
+
+    def __reduce__(self):
+        return symbol, (self.name,)
+
+    def __repr__(self):
+        return self.name
+_symbol.__name__ = 'symbol'
+
+
+class symbol(object):
+    """A constant symbol.
+
+    >>> symbol('foo') is symbol('foo')
+    True
+    >>> symbol('foo')
+    foo
+
+    A slight refinement of the MAGICCOOKIE=object() pattern.  The primary
+    advantage of symbol() is its repr().  They are also singletons.
+
+    Repeated calls of symbol('name') will all return the same instance.
+
+    """
+    symbols = {}
+
+    def __new__(cls, name):
+        try:
+            return cls.symbols[name]
+        except KeyError:
+            return cls.symbols.setdefault(name, _symbol(name))
+
+
+try:
+    text = (str, unicode)
+except NameError:
+    text = str
+
+
+def hashable_identity(obj):
+    if hasattr(obj, '__func__'):
+        return (id(obj.__func__), id(obj.__self__))
+    elif hasattr(obj, 'im_func'):
+        return (id(obj.im_func), id(obj.im_self))
+    elif isinstance(obj, text):
+        return obj
+    else:
+        return id(obj)
+
+
+WeakTypes = (ref, BoundMethodWeakref)
+
+
+class annotatable_weakref(ref):
+    """A weakref.ref that supports custom instance attributes."""
+
+
+def reference(object, callback=None, **annotations):
+    """Return an annotated weak ref."""
+    if callable(object):
+        weak = callable_reference(object, callback)
+    else:
+        weak = annotatable_weakref(object, callback)
+    for key, value in annotations.items():
+        setattr(weak, key, value)
+    return weak
+
+
+def callable_reference(object, callback=None):
+    """Return an annotated weak ref, supporting bound instance methods."""
+    if hasattr(object, 'im_self') and object.im_self is not None:
+        return BoundMethodWeakref(target=object, on_delete=callback)
+    elif hasattr(object, '__self__') and object.__self__ is not None:
+        return BoundMethodWeakref(target=object, on_delete=callback)
+    return annotatable_weakref(object, callback)
+
+
+class lazy_property(object):
+    """A @property that is only evaluated once."""
+
+    def __init__(self, deferred):
+        self._deferred = deferred
+        self.__doc__ = deferred.__doc__
+
+    def __get__(self, obj, cls):
+        if obj is None:
+            return self
+        value = self._deferred(obj)
+        setattr(obj, self._deferred.__name__, value)
+        return value

+ 455 - 0
Backend/venv/Lib/site-packages/blinker/base.py

@@ -0,0 +1,455 @@
+# -*- coding: utf-8; fill-column: 76 -*-
+"""Signals and events.
+
+A small implementation of signals, inspired by a snippet of Django signal
+API client code seen in a blog post.  Signals are first-class objects and
+each manages its own receivers and message emission.
+
+The :func:`signal` function provides singleton behavior for named signals.
+
+"""
+from warnings import warn
+from weakref import WeakValueDictionary
+
+from blinker._utilities import (
+    WeakTypes,
+    contextmanager,
+    defaultdict,
+    hashable_identity,
+    lazy_property,
+    reference,
+    symbol,
+    )
+
+
+ANY = symbol('ANY')
+ANY.__doc__ = 'Token for "any sender".'
+ANY_ID = 0
+
+
+class Signal(object):
+    """A notification emitter."""
+
+    #: An :obj:`ANY` convenience synonym, allows ``Signal.ANY``
+    #: without an additional import.
+    ANY = ANY
+
+    @lazy_property
+    def receiver_connected(self):
+        """Emitted after each :meth:`connect`.
+
+        The signal sender is the signal instance, and the :meth:`connect`
+        arguments are passed through: *receiver*, *sender*, and *weak*.
+
+        .. versionadded:: 1.2
+
+        """
+        return Signal(doc="Emitted after a receiver connects.")
+
+    @lazy_property
+    def receiver_disconnected(self):
+        """Emitted after :meth:`disconnect`.
+
+        The sender is the signal instance, and the :meth:`disconnect` arguments
+        are passed through: *receiver* and *sender*.
+
+        Note, this signal is emitted **only** when :meth:`disconnect` is
+        called explicitly.
+
+        The disconnect signal can not be emitted by an automatic disconnect
+        (due to a weakly referenced receiver or sender going out of scope),
+        as the receiver and/or sender instances are no longer available for
+        use at the time this signal would be emitted.
+
+        An alternative approach is available by subscribing to
+        :attr:`receiver_connected` and setting up a custom weakref cleanup
+        callback on weak receivers and senders.
+
+        .. versionadded:: 1.2
+
+        """
+        return Signal(doc="Emitted after a receiver disconnects.")
+
+    def __init__(self, doc=None):
+        """
+        :param doc: optional.  If provided, will be assigned to the signal's
+          __doc__ attribute.
+
+        """
+        if doc:
+            self.__doc__ = doc
+        #: A mapping of connected receivers.
+        #:
+        #: The values of this mapping are not meaningful outside of the
+        #: internal :class:`Signal` implementation, however the boolean value
+        #: of the mapping is useful as an extremely efficient check to see if
+        #: any receivers are connected to the signal.
+        self.receivers = {}
+        self._by_receiver = defaultdict(set)
+        self._by_sender = defaultdict(set)
+        self._weak_senders = {}
+
+    def connect(self, receiver, sender=ANY, weak=True):
+        """Connect *receiver* to signal events sent by *sender*.
+
+        :param receiver: A callable.  Will be invoked by :meth:`send` with
+          `sender=` as a single positional argument and any \*\*kwargs that
+          were provided to a call to :meth:`send`.
+
+        :param sender: Any object or :obj:`ANY`, defaults to ``ANY``.
+          Restricts notifications delivered to *receiver* to only those
+          :meth:`send` emissions sent by *sender*.  If ``ANY``, the receiver
+          will always be notified.  A *receiver* may be connected to
+          multiple *sender* values on the same Signal through multiple calls
+          to :meth:`connect`.
+
+        :param weak: If true, the Signal will hold a weakref to *receiver*
+          and automatically disconnect when *receiver* goes out of scope or
+          is garbage collected.  Defaults to True.
+
+        """
+        receiver_id = hashable_identity(receiver)
+        if weak:
+            receiver_ref = reference(receiver, self._cleanup_receiver)
+            receiver_ref.receiver_id = receiver_id
+        else:
+            receiver_ref = receiver
+        if sender is ANY:
+            sender_id = ANY_ID
+        else:
+            sender_id = hashable_identity(sender)
+
+        self.receivers.setdefault(receiver_id, receiver_ref)
+        self._by_sender[sender_id].add(receiver_id)
+        self._by_receiver[receiver_id].add(sender_id)
+        del receiver_ref
+
+        if sender is not ANY and sender_id not in self._weak_senders:
+            # wire together a cleanup for weakref-able senders
+            try:
+                sender_ref = reference(sender, self._cleanup_sender)
+                sender_ref.sender_id = sender_id
+            except TypeError:
+                pass
+            else:
+                self._weak_senders.setdefault(sender_id, sender_ref)
+                del sender_ref
+
+        # broadcast this connection.  if receivers raise, disconnect.
+        if ('receiver_connected' in self.__dict__ and
+            self.receiver_connected.receivers):
+            try:
+                self.receiver_connected.send(self,
+                                             receiver=receiver,
+                                             sender=sender,
+                                             weak=weak)
+            except:
+                self.disconnect(receiver, sender)
+                raise
+        if receiver_connected.receivers and self is not receiver_connected:
+            try:
+                receiver_connected.send(self,
+                                        receiver_arg=receiver,
+                                        sender_arg=sender,
+                                        weak_arg=weak)
+            except:
+                self.disconnect(receiver, sender)
+                raise
+        return receiver
+
+    def connect_via(self, sender, weak=False):
+        """Connect the decorated function as a receiver for *sender*.
+
+        :param sender: Any object or :obj:`ANY`.  The decorated function
+          will only receive :meth:`send` emissions sent by *sender*.  If
+          ``ANY``, the receiver will always be notified.  A function may be
+          decorated multiple times with differing *sender* values.
+
+        :param weak: If true, the Signal will hold a weakref to the
+          decorated function and automatically disconnect when *receiver*
+          goes out of scope or is garbage collected.  Unlike
+          :meth:`connect`, this defaults to False.
+
+        The decorated function will be invoked by :meth:`send` with
+          `sender=` as a single positional argument and any \*\*kwargs that
+          were provided to the call to :meth:`send`.
+
+
+        .. versionadded:: 1.1
+
+        """
+        def decorator(fn):
+            self.connect(fn, sender, weak)
+            return fn
+        return decorator
+
+    @contextmanager
+    def connected_to(self, receiver, sender=ANY):
+        """Execute a block with the signal temporarily connected to *receiver*.
+
+        :param receiver: a receiver callable
+        :param sender: optional, a sender to filter on
+
+        This is a context manager for use in the ``with`` statement.  It can
+        be useful in unit tests.  *receiver* is connected to the signal for
+        the duration of the ``with`` block, and will be disconnected
+        automatically when exiting the block:
+
+        .. testsetup::
+
+          from __future__ import with_statement
+          from blinker import Signal
+          on_ready = Signal()
+          receiver = lambda sender: None
+
+        .. testcode::
+
+          with on_ready.connected_to(receiver):
+             # do stuff
+             on_ready.send(123)
+
+        .. versionadded:: 1.1
+
+        """
+        self.connect(receiver, sender=sender, weak=False)
+        try:
+            yield None
+        except:
+            self.disconnect(receiver)
+            raise
+        else:
+            self.disconnect(receiver)
+
+    def temporarily_connected_to(self, receiver, sender=ANY):
+        """An alias for :meth:`connected_to`.
+
+        :param receiver: a receiver callable
+        :param sender: optional, a sender to filter on
+
+        .. versionadded:: 0.9
+
+        .. versionchanged:: 1.1
+          Renamed to :meth:`connected_to`.  ``temporarily_connected_to`` was
+          deprecated in 1.2 and will be removed in a subsequent version.
+
+        """
+        warn("temporarily_connected_to is deprecated; "
+             "use connected_to instead.",
+             DeprecationWarning)
+        return self.connected_to(receiver, sender)
+
+    def send(self, *sender, **kwargs):
+        """Emit this signal on behalf of *sender*, passing on \*\*kwargs.
+
+        Returns a list of 2-tuples, pairing receivers with their return
+        value. The ordering of receiver notification is undefined.
+
+        :param \*sender: Any object or ``None``.  If omitted, synonymous
+          with ``None``.  Only accepts one positional argument.
+
+        :param \*\*kwargs: Data to be sent to receivers.
+
+        """
+        # Using '*sender' rather than 'sender=None' allows 'sender' to be
+        # used as a keyword argument- i.e. it's an invisible name in the
+        # function signature.
+        if len(sender) == 0:
+            sender = None
+        elif len(sender) > 1:
+            raise TypeError('send() accepts only one positional argument, '
+                            '%s given' % len(sender))
+        else:
+            sender = sender[0]
+        if not self.receivers:
+            return []
+        else:
+            return [(receiver, receiver(sender, **kwargs))
+                    for receiver in self.receivers_for(sender)]
+
+    def has_receivers_for(self, sender):
+        """True if there is probably a receiver for *sender*.
+
+        Performs an optimistic check only.  Does not guarantee that all
+        weakly referenced receivers are still alive.  See
+        :meth:`receivers_for` for a stronger search.
+
+        """
+        if not self.receivers:
+            return False
+        if self._by_sender[ANY_ID]:
+            return True
+        if sender is ANY:
+            return False
+        return hashable_identity(sender) in self._by_sender
+
+    def receivers_for(self, sender):
+        """Iterate all live receivers listening for *sender*."""
+        # TODO: test receivers_for(ANY)
+        if self.receivers:
+            sender_id = hashable_identity(sender)
+            if sender_id in self._by_sender:
+                ids = (self._by_sender[ANY_ID] |
+                       self._by_sender[sender_id])
+            else:
+                ids = self._by_sender[ANY_ID].copy()
+            for receiver_id in ids:
+                receiver = self.receivers.get(receiver_id)
+                if receiver is None:
+                    continue
+                if isinstance(receiver, WeakTypes):
+                    strong = receiver()
+                    if strong is None:
+                        self._disconnect(receiver_id, ANY_ID)
+                        continue
+                    receiver = strong
+                yield receiver
+
+    def disconnect(self, receiver, sender=ANY):
+        """Disconnect *receiver* from this signal's events.
+
+        :param receiver: a previously :meth:`connected<connect>` callable
+
+        :param sender: a specific sender to disconnect from, or :obj:`ANY`
+          to disconnect from all senders.  Defaults to ``ANY``.
+
+        """
+        if sender is ANY:
+            sender_id = ANY_ID
+        else:
+            sender_id = hashable_identity(sender)
+        receiver_id = hashable_identity(receiver)
+        self._disconnect(receiver_id, sender_id)
+
+        if ('receiver_disconnected' in self.__dict__ and
+            self.receiver_disconnected.receivers):
+            self.receiver_disconnected.send(self,
+                                            receiver=receiver,
+                                            sender=sender)
+
+    def _disconnect(self, receiver_id, sender_id):
+        if sender_id == ANY_ID:
+            if self._by_receiver.pop(receiver_id, False):
+                for bucket in self._by_sender.values():
+                    bucket.discard(receiver_id)
+            self.receivers.pop(receiver_id, None)
+        else:
+            self._by_sender[sender_id].discard(receiver_id)
+            self._by_receiver[receiver_id].discard(sender_id)
+
+    def _cleanup_receiver(self, receiver_ref):
+        """Disconnect a receiver from all senders."""
+        self._disconnect(receiver_ref.receiver_id, ANY_ID)
+
+    def _cleanup_sender(self, sender_ref):
+        """Disconnect all receivers from a sender."""
+        sender_id = sender_ref.sender_id
+        assert sender_id != ANY_ID
+        self._weak_senders.pop(sender_id, None)
+        for receiver_id in self._by_sender.pop(sender_id, ()):
+            self._by_receiver[receiver_id].discard(sender_id)
+
+    def _cleanup_bookkeeping(self):
+        """Prune unused sender/receiver bookeeping. Not threadsafe.
+
+        Connecting & disconnecting leave behind a small amount of bookeeping
+        for the receiver and sender values. Typical workloads using Blinker,
+        for example in most web apps, Flask, CLI scripts, etc., are not
+        adversely affected by this bookkeeping.
+
+        With a long-running Python process performing dynamic signal routing
+        with high volume- e.g. connecting to function closures, "senders" are
+        all unique object instances, and doing all of this over and over- you
+        may see memory usage will grow due to extraneous bookeeping. (An empty
+        set() for each stale sender/receiver pair.)
+
+        This method will prune that bookeeping away, with the caveat that such
+        pruning is not threadsafe. The risk is that cleanup of a fully
+        disconnected receiver/sender pair occurs while another thread is
+        connecting that same pair. If you are in the highly dynamic, unique
+        receiver/sender situation that has lead you to this method, that
+        failure mode is perhaps not a big deal for you.
+        """
+        for mapping in (self._by_sender, self._by_receiver):
+            for _id, bucket in list(mapping.items()):
+                if not bucket:
+                    mapping.pop(_id, None)
+
+    def _clear_state(self):
+        """Throw away all signal state.  Useful for unit tests."""
+        self._weak_senders.clear()
+        self.receivers.clear()
+        self._by_sender.clear()
+        self._by_receiver.clear()
+
+
+receiver_connected = Signal("""\
+Sent by a :class:`Signal` after a receiver connects.
+
+:argument: the Signal that was connected to
+:keyword receiver_arg: the connected receiver
+:keyword sender_arg: the sender to connect to
+:keyword weak_arg: true if the connection to receiver_arg is a weak reference
+
+.. deprecated:: 1.2
+
+As of 1.2, individual signals have their own private
+:attr:`~Signal.receiver_connected` and
+:attr:`~Signal.receiver_disconnected` signals with a slightly simplified
+call signature.  This global signal is planned to be removed in 1.6.
+
+""")
+
+
+class NamedSignal(Signal):
+    """A named generic notification emitter."""
+
+    def __init__(self, name, doc=None):
+        Signal.__init__(self, doc)
+
+        #: The name of this signal.
+        self.name = name
+
+    def __repr__(self):
+        base = Signal.__repr__(self)
+        return "%s; %r>" % (base[:-1], self.name)
+
+
+class Namespace(dict):
+    """A mapping of signal names to signals."""
+
+    def signal(self, name, doc=None):
+        """Return the :class:`NamedSignal` *name*, creating it if required.
+
+        Repeated calls to this function will return the same signal object.
+
+        """
+        try:
+            return self[name]
+        except KeyError:
+            return self.setdefault(name, NamedSignal(name, doc))
+
+
+class WeakNamespace(WeakValueDictionary):
+    """A weak mapping of signal names to signals.
+
+    Automatically cleans up unused Signals when the last reference goes out
+    of scope.  This namespace implementation exists for a measure of legacy
+    compatibility with Blinker <= 1.2, and may be dropped in the future.
+
+    .. versionadded:: 1.3
+
+    """
+
+    def signal(self, name, doc=None):
+        """Return the :class:`NamedSignal` *name*, creating it if required.
+
+        Repeated calls to this function will return the same signal object.
+
+        """
+        try:
+            return self[name]
+        except KeyError:
+            return self.setdefault(name, NamedSignal(name, doc))
+
+
+signal = Namespace().signal

+ 1 - 0
Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 28 - 0
Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/LICENSE.rst

@@ -0,0 +1,28 @@
+Copyright 2018 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.  Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+3.  Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+ 67 - 0
Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/METADATA

@@ -0,0 +1,67 @@
+Metadata-Version: 2.1
+Name: cachelib
+Version: 0.6.0
+Summary: A collection of cache libraries in the same API interface.
+Home-page: https://github.com/pallets/cachelib/
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://cachelib.readthedocs.io/
+Project-URL: Changes, https://cachelib.readthedocs.io/changes/
+Project-URL: Source Code, https://github.com/pallets/cachelib/
+Project-URL: Issue Tracker, https://github.com/pallets/cachelib/issues/
+Project-URL: Twitter, https://twitter.com/PalletsTeam
+Project-URL: Chat, https://discord.gg/pallets
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+
+CacheLib
+========
+
+A collection of cache libraries in the same API interface. Extracted
+from Werkzeug.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    $ pip install -U cachelib
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+Donate
+------
+
+The Pallets organization develops and supports Flask and the libraries
+it uses. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+-   Documentation: https://cachelib.readthedocs.io/
+-   Changes: https://cachelib.readthedocs.io/changes/
+-   PyPI Releases: https://pypi.org/project/cachelib/
+-   Source Code: https://github.com/pallets/cachelib/
+-   Issue Tracker: https://github.com/pallets/cachelib/issues/
+-   Twitter: https://twitter.com/PalletsTeam
+-   Chat: https://discord.gg/pallets
+
+

+ 23 - 0
Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/RECORD

@@ -0,0 +1,23 @@
+cachelib-0.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cachelib-0.6.0.dist-info/LICENSE.rst,sha256=zUGBIIEtwmJiga4CfoG2SCKdFmtaynRyzs1RADjTbn0,1475
+cachelib-0.6.0.dist-info/METADATA,sha256=lRazygkqhX7GMS7gZX0OYQkPP058QXXtk8a6O4JbqO8,1967
+cachelib-0.6.0.dist-info/RECORD,,
+cachelib-0.6.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+cachelib-0.6.0.dist-info/top_level.txt,sha256=AYC4q8wgGd_hR_F2YcDkmtQm41gv9-5AThKuQtNPEXk,9
+cachelib/__init__.py,sha256=yn6sGua4H4ZzeH86AKK2gzIusIv3OcRb51gIEe_k2xo,447
+cachelib/__pycache__/__init__.cpython-39.pyc,,
+cachelib/__pycache__/base.cpython-39.pyc,,
+cachelib/__pycache__/file.cpython-39.pyc,,
+cachelib/__pycache__/memcached.cpython-39.pyc,,
+cachelib/__pycache__/redis.cpython-39.pyc,,
+cachelib/__pycache__/serializers.cpython-39.pyc,,
+cachelib/__pycache__/simple.cpython-39.pyc,,
+cachelib/__pycache__/uwsgi.cpython-39.pyc,,
+cachelib/base.py,sha256=iQ374pzY7MVjrC50LD24c788XWjwoGTpI1PtvXxqzbo,6713
+cachelib/file.py,sha256=0Qxbb82kKTWzPgj3yjCUruJMhcX_-Wm-JKV39ij-EHM,11059
+cachelib/memcached.py,sha256=sR3-_z719MOOa4TGfCaVSCNTCbMC5pxfIOdWWUxKn0o,7147
+cachelib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cachelib/redis.py,sha256=2RXb7MFj1eMqo4bRIOxjp5sybS0aNxMebOu9kaMzzac,6155
+cachelib/serializers.py,sha256=kInSuUkqqfqDjnp1WjfMP09IEkbklL9q8GZrFmbFT5A,3204
+cachelib/simple.py,sha256=q5j5WDwOFPdAgJI6wcj4LnFCaix3jUB0sDVuLO-wsWY,3481
+cachelib/uwsgi.py,sha256=4DX3C9QGvB6mVcg1d7qpLIEkI6bccuq-8M6I_YbPicY,2563

+ 5 - 0
Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+

+ 1 - 0
Backend/venv/Lib/site-packages/cachelib-0.6.0.dist-info/top_level.txt

@@ -0,0 +1 @@
+cachelib

+ 18 - 0
Backend/venv/Lib/site-packages/cachelib/__init__.py

@@ -0,0 +1,18 @@
+from cachelib.base import BaseCache
+from cachelib.base import NullCache
+from cachelib.file import FileSystemCache
+from cachelib.memcached import MemcachedCache
+from cachelib.redis import RedisCache
+from cachelib.simple import SimpleCache
+from cachelib.uwsgi import UWSGICache
+
+__all__ = [
+    "BaseCache",
+    "NullCache",
+    "SimpleCache",
+    "FileSystemCache",
+    "MemcachedCache",
+    "RedisCache",
+    "UWSGICache",
+]
+__version__ = "0.6.0"
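
For orientation, a quick round trip through the API shared by all these backends, using the in-memory SimpleCache; keys and values are illustrative:

from cachelib import SimpleCache

cache = SimpleCache(threshold=100, default_timeout=60)
cache.set("greeting", "hello", timeout=5)   # expires after ~5 seconds
assert cache.get("greeting") == "hello"
assert cache.get("missing") is None         # misses return None, never raise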

+ 185 - 0
Backend/venv/Lib/site-packages/cachelib/base.py

@@ -0,0 +1,185 @@
+import typing as _t
+
+
+class BaseCache:
+    """Baseclass for the cache systems.  All the cache systems implement this
+    API or a superset of it.
+
+    :param default_timeout: the default timeout (in seconds) that is used if
+                            no timeout is specified on :meth:`set`. A timeout
+                            of 0 indicates that the cache never expires.
+    """
+
+    def __init__(self, default_timeout: int = 300):
+        self.default_timeout = default_timeout
+
+    def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
+        if timeout is None:
+            timeout = self.default_timeout
+        return timeout
+
+    def get(self, key: str) -> _t.Any:
+        """Look up key in the cache and return the value for it.
+
+        :param key: the key to be looked up.
+        :returns: The value if it exists and is readable, else ``None``.
+        """
+        return None
+
+    def delete(self, key: str) -> bool:
+        """Delete `key` from the cache.
+
+        :param key: the key to delete.
+        :returns: Whether the key existed and has been deleted.
+        :rtype: boolean
+        """
+        return True
+
+    def get_many(self, *keys: str) -> _t.List[_t.Any]:
+        """Returns a list of values for the given keys.
+        For each key an item in the list is created::
+
+            foo, bar = cache.get_many("foo", "bar")
+
+        Has the same error handling as :meth:`get`.
+
+        :param keys: The function accepts multiple keys as positional
+                     arguments.
+        """
+        return [self.get(k) for k in keys]
+
+    def get_dict(self, *keys: str) -> _t.Dict[str, _t.Any]:
+        """Like :meth:`get_many` but return a dict::
+
+            d = cache.get_dict("foo", "bar")
+            foo = d["foo"]
+            bar = d["bar"]
+
+        :param keys: The function accepts multiple keys as positional
+                     arguments.
+        """
+        return dict(zip(keys, self.get_many(*keys)))
+
+    def set(
+        self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
+    ) -> _t.Optional[bool]:
+        """Add a new key/value to the cache (overwrites value, if key already
+        exists in the cache).
+
+        :param key: the key to set
+        :param value: the value for the key
+        :param timeout: the cache timeout for the key in seconds (if not
+                        specified, it uses the default timeout). A timeout of
+                        0 indicates that the cache never expires.
+        :returns: ``True`` if key has been updated, ``False`` for backend
+                  errors. Pickling errors, however, will raise a subclass of
+                  ``pickle.PickleError``.
+        :rtype: boolean
+        """
+        return True
+
+    def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
+        """Works like :meth:`set` but does not overwrite the values of already
+        existing keys.
+
+        :param key: the key to set
+        :param value: the value for the key
+        :param timeout: the cache timeout for the key in seconds (if not
+                        specified, it uses the default timeout). A timeout of
+                        0 indicates that the cache never expires.
+        :returns: Same as :meth:`set`, but also ``False`` for already
+                  existing keys.
+        :rtype: boolean
+        """
+        return True
+
+    def set_many(
+        self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None
+    ) -> _t.List[_t.Any]:
+        """Sets multiple keys and values from a mapping.
+
+        :param mapping: a mapping with the keys/values to set.
+        :param timeout: the cache timeout for the key in seconds (if not
+                        specified, it uses the default timeout). A timeout of
+                        0 indicates that the cache never expires.
+        :returns: A list containing all keys successfully set
+        :rtype: list
+        """
+        set_keys = []
+        for key, value in mapping.items():
+            if self.set(key, value, timeout):
+                set_keys.append(key)
+        return set_keys
+
+    def delete_many(self, *keys: str) -> _t.List[_t.Any]:
+        """Deletes multiple keys at once.
+
+        :param keys: The function accepts multiple keys as positional
+                     arguments.
+        :returns: A list containing all successfully deleted keys
+        :rtype: list
+        """
+        deleted_keys = []
+        for key in keys:
+            if self.delete(key):
+                deleted_keys.append(key)
+        return deleted_keys
+
+    def has(self, key: str) -> bool:
+        """Checks if a key exists in the cache without returning it. This is a
+        cheap operation that bypasses loading the actual data on the backend.
+
+        :param key: the key to check
+        """
+        raise NotImplementedError(
+            "%s doesn't have an efficient implementation of `has`. That "
+            "means it is impossible to check whether a key exists without "
+            "fully loading the key's data. Consider using `self.get` "
+            "explicitly if you don't care about performance."
+        )
+
+    def clear(self) -> bool:
+        """Clears the cache.  Keep in mind that not all caches support
+        completely clearing the cache.
+
+        :returns: Whether the cache has been cleared.
+        :rtype: boolean
+        """
+        return True
+
+    def inc(self, key: str, delta: int = 1) -> _t.Optional[int]:
+        """Increments the value of a key by `delta`.  If the key does
+        not yet exist it is initialized with `delta`.
+
+        For supporting caches this is an atomic operation.
+
+        :param key: the key to increment.
+        :param delta: the delta to add.
+        :returns: The new value or ``None`` for backend errors.
+        """
+        value = (self.get(key) or 0) + delta
+        return value if self.set(key, value) else None
+
+    def dec(self, key: str, delta: int = 1) -> _t.Optional[int]:
+        """Decrements the value of a key by `delta`.  If the key does
+        not yet exist it is initialized with `-delta`.
+
+        For supporting caches this is an atomic operation.
+
+        :param key: the key to increment.
+        :param delta: the delta to subtract.
+        :returns: The new value or `None` for backend errors.
+        """
+        value = (self.get(key) or 0) - delta
+        return value if self.set(key, value) else None
+
+
+class NullCache(BaseCache):
+    """A cache that doesn't cache.  This can be useful for unit testing.
+
+    :param default_timeout: a dummy parameter that is ignored but exists
+                            for API compatibility with other caches.
+    """
+
+    def has(self, key: str) -> bool:
+        return False
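
Because get_many, get_dict, set_many, delete_many, inc and dec are all layered on the get/set/delete primitives above, a custom backend only needs to override those. A toy sketch under that assumption (DictCache is hypothetical, not part of cachelib):

import typing as _t

from cachelib import BaseCache

class DictCache(BaseCache):
    """Toy backend: a plain dict, timeouts deliberately ignored."""

    def __init__(self, default_timeout: int = 300):
        super().__init__(default_timeout)
        self._store: _t.Dict[str, _t.Any] = {}

    def get(self, key: str) -> _t.Any:
        return self._store.get(key)

    def set(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
        self._store[key] = value
        return True

    def delete(self, key: str) -> bool:
        return self._store.pop(key, None) is not None

    def has(self, key: str) -> bool:
        return key in self._store

cache = DictCache()
cache.set_many({"a": 1, "b": 2})            # inherited helper built on set()
assert cache.get_dict("a", "b") == {"a": 1, "b": 2}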

+ 319 - 0
Backend/venv/Lib/site-packages/cachelib/file.py

@@ -0,0 +1,319 @@
+import errno
+import logging
+import os
+import platform
+import tempfile
+import typing as _t
+from contextlib import contextmanager
+from hashlib import md5
+from pathlib import Path
+from time import sleep
+from time import time
+
+from cachelib.base import BaseCache
+from cachelib.serializers import FileSystemSerializer
+
+
+class FileSystemCache(BaseCache):
+    """A cache that stores the items on the file system.  This cache depends
+    on being the only user of the `cache_dir`.  Make absolutely sure that
+    nobody but this cache stores files there or otherwise the cache will
+    randomly delete files therein.
+
+    :param cache_dir: the directory where cache files are stored.
+    :param threshold: the maximum number of items the cache stores before
+                      it starts deleting some. A threshold value of 0
+                      indicates no threshold.
+    :param default_timeout: the default timeout that is used if no timeout is
+                            specified on :meth:`~BaseCache.set`. A timeout of
+                            0 indicates that the cache never expires.
+    :param mode: the file mode wanted for the cache files, default 0600
+    :param hash_method: Default hashlib.md5. The hash method used to
+                        generate the filename for cached results.
+    """
+
+    #: used for temporary files by the FileSystemCache
+    _fs_transaction_suffix = ".__wz_cache"
+    #: keep amount of files in a cache element
+    _fs_count_file = "__wz_cache_count"
+
+    serializer = FileSystemSerializer()
+
+    def __init__(
+        self,
+        cache_dir: str,
+        threshold: int = 500,
+        default_timeout: int = 300,
+        mode: int = 0o600,
+        hash_method: _t.Any = md5,
+    ):
+        BaseCache.__init__(self, default_timeout)
+        self._path = cache_dir
+        self._threshold = threshold
+        self._hash_method = hash_method
+        self._mode = mode
+
+        try:
+            os.makedirs(self._path)
+        except OSError as ex:
+            if ex.errno != errno.EEXIST:
+                raise
+
+        # If there are many files and a zero threshold,
+        # the list_dir can slow initialisation massively
+        if self._threshold != 0:
+            self._update_count(value=len(list(self._list_dir())))
+
+    @property
+    def _file_count(self) -> int:
+        return self.get(self._fs_count_file) or 0
+
+    def _update_count(
+        self, delta: _t.Optional[int] = None, value: _t.Optional[int] = None
+    ) -> None:
+        # If we have no threshold, don't count files
+        if self._threshold == 0:
+            return
+        if delta:
+            new_count = self._file_count + delta
+        else:
+            new_count = value or 0
+        self.set(self._fs_count_file, new_count, mgmt_element=True)
+
+    def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
+        timeout = BaseCache._normalize_timeout(self, timeout)
+        if timeout != 0:
+            timeout = int(time()) + timeout
+        return int(timeout)
+
+    def _is_mgmt(self, name: str) -> bool:
+        fshash = self._get_filename(self._fs_count_file).split(os.sep)[-1]
+        return name == fshash or name.endswith(self._fs_transaction_suffix)
+
+    def _list_dir(self) -> _t.Generator[str, None, None]:
+        """return a list of (fully qualified) cache filenames"""
+        return (
+            os.path.join(self._path, fn)
+            for fn in os.listdir(self._path)
+            if not self._is_mgmt(fn)
+        )
+
+    def _over_threshold(self) -> bool:
+        return self._threshold != 0 and self._file_count > self._threshold
+
+    def _remove_expired(self, now: float) -> None:
+        for fname in self._list_dir():
+            try:
+                with self._safe_stream_open(fname, "rb") as f:
+                    expires = self.serializer.load(f)
+                if expires != 0 and expires < now:
+                    os.remove(fname)
+                    self._update_count(delta=-1)
+            except FileNotFoundError:
+                pass
+            except (OSError, EOFError):
+                logging.warning(
+                    "Exception raised while handling cache file '%s'",
+                    fname,
+                    exc_info=True,
+                )
+
+    def _remove_older(self) -> bool:
+        exp_fname_tuples = []
+        for fname in self._list_dir():
+            try:
+                with self._safe_stream_open(fname, "rb") as f:
+                    exp_fname_tuples.append((self.serializer.load(f), fname))
+            except FileNotFoundError:
+                pass
+            except (OSError, EOFError):
+                logging.warning(
+                    "Exception raised while handling cache file '%s'",
+                    fname,
+                    exc_info=True,
+                )
+        fname_sorted = (
+            fname
+            for _, fname in sorted(
+                exp_fname_tuples, key=lambda item: item[0]  # type: ignore
+            )
+        )
+        for fname in fname_sorted:
+            try:
+                os.remove(fname)
+                self._update_count(delta=-1)
+            except FileNotFoundError:
+                pass
+            except OSError:
+                logging.warning(
+                    "Exception raised while handling cache file '%s'",
+                    fname,
+                    exc_info=True,
+                )
+                return False
+            if not self._over_threshold():
+                break
+        return True
+
+    def _prune(self) -> None:
+        if self._over_threshold():
+            now = time()
+            self._remove_expired(now)
+        # if still over threshold
+        if self._over_threshold():
+            self._remove_older()
+
+    def clear(self) -> bool:
+        for i, fname in enumerate(self._list_dir()):
+            try:
+                os.remove(fname)
+            except FileNotFoundError:
+                pass
+            except OSError:
+                logging.warning(
+                    "Exception raised while handling cache file '%s'",
+                    fname,
+                    exc_info=True,
+                )
+                self._update_count(delta=-i)
+                return False
+        self._update_count(value=0)
+        return True
+
+    def _get_filename(self, key: str) -> str:
+        if isinstance(key, str):
+            bkey = key.encode("utf-8")  # XXX unicode review
+            bkey_hash = self._hash_method(bkey).hexdigest()
+        return os.path.join(self._path, bkey_hash)
+
+    def get(self, key: str) -> _t.Any:
+        filename = self._get_filename(key)
+        try:
+            with self._safe_stream_open(filename, "rb") as f:
+                pickle_time = self.serializer.load(f)
+                if pickle_time == 0 or pickle_time >= time():
+                    return self.serializer.load(f)
+        except FileNotFoundError:
+            pass
+        except (OSError, EOFError):
+            logging.warning(
+                "Exception raised while handling cache file '%s'",
+                filename,
+                exc_info=True,
+            )
+        return None
+
+    def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
+        filename = self._get_filename(key)
+        if not os.path.exists(filename):
+            return self.set(key, value, timeout)
+        return False
+
+    def set(
+        self,
+        key: str,
+        value: _t.Any,
+        timeout: _t.Optional[int] = None,
+        mgmt_element: bool = False,
+    ) -> bool:
+        # Management elements have no timeout
+        if mgmt_element:
+            timeout = 0
+        # Don't prune on management element update, to avoid loop
+        else:
+            self._prune()
+
+        timeout = self._normalize_timeout(timeout)
+        filename = self._get_filename(key)
+        overwrite = os.path.isfile(filename)
+
+        try:
+            fd, tmp = tempfile.mkstemp(
+                suffix=self._fs_transaction_suffix, dir=self._path
+            )
+            with os.fdopen(fd, "wb") as f:
+                self.serializer.dump(timeout, f)  # writes the expiry timestamp first
+                self.serializer.dump(value, f)
+
+            self._run_safely(os.replace, tmp, filename)
+            self._run_safely(os.chmod, filename, self._mode)
+
+            fsize = Path(filename).stat().st_size
+        except OSError:
+            logging.warning(
+                "Exception raised while handling cache file '%s'",
+                filename,
+                exc_info=True,
+            )
+            return False
+        else:
+            # Management elements should not count towards threshold
+            if not overwrite and not mgmt_element:
+                self._update_count(delta=1)
+            return fsize > 0  # function should fail if file is empty
+
+    def delete(self, key: str, mgmt_element: bool = False) -> bool:
+        try:
+            os.remove(self._get_filename(key))
+        except FileNotFoundError:  # if file doesn't exist we consider it deleted
+            return True
+        except OSError:
+            logging.warning("Exception raised while handling cache file", exc_info=True)
+            return False
+        else:
+            # Management elements should not count towards threshold
+            if not mgmt_element:
+                self._update_count(delta=-1)
+            return True
+
+    def has(self, key: str) -> bool:
+        filename = self._get_filename(key)
+        try:
+            with self._safe_stream_open(filename, "rb") as f:
+                pickle_time = self.serializer.load(f)
+                if pickle_time == 0 or pickle_time >= time():
+                    return True
+                else:
+                    return False
+        except FileNotFoundError:  # if there is no file there is no key
+            return False
+        except (OSError, EOFError):
+            logging.warning(
+                "Exception raised while handling cache file '%s'",
+                filename,
+                exc_info=True,
+            )
+            return False
+
+    def _run_safely(self, fn: _t.Callable, *args: _t.Any, **kwargs: _t.Any) -> _t.Any:
+        """On Windows os.replace, os.chmod and open can yield
+        permission errors if executed by two different processes."""
+        if platform.system() == "Windows":
+            output = None
+            wait_step = 0.001
+            max_sleep_time = 10.0
+            total_sleep_time = 0.0
+
+            while total_sleep_time < max_sleep_time:
+                try:
+                    output = fn(*args, **kwargs)
+                except PermissionError:
+                    sleep(wait_step)
+                    total_sleep_time += wait_step
+                    wait_step *= 2
+                else:
+                    break
+        else:
+            output = fn(*args, **kwargs)
+
+        return output
+
+    @contextmanager
+    def _safe_stream_open(self, path: str, mode: str) -> _t.Generator:
+        fs = self._run_safely(open, path, mode)
+        if fs is None:
+            raise OSError
+        try:
+            yield fs
+        finally:
+            fs.close()
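
A short sketch of FileSystemCache in use; the directory comes from tempfile so the "only user of cache_dir" requirement holds:

import tempfile

from cachelib import FileSystemCache

cache_dir = tempfile.mkdtemp(prefix="demo-cache-")  # exclusively ours
cache = FileSystemCache(cache_dir, threshold=50, default_timeout=300)

cache.set("config", {"debug": True})        # pickled to a hashed filename
assert cache.get("config") == {"debug": True}
cache.delete("config")
assert not cache.has("config")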

+ 197 - 0
Backend/venv/Lib/site-packages/cachelib/memcached.py

@@ -0,0 +1,197 @@
+import re
+import typing as _t
+from time import time
+
+from cachelib.base import BaseCache
+
+
+_test_memcached_key = re.compile(r"[^\x00-\x21\xff]{1,250}$").match
+
+
+class MemcachedCache(BaseCache):
+
+    """A cache that uses memcached as backend.
+
+    The first argument can either be an object that resembles the API of a
+    :class:`memcache.Client` or a tuple/list of server addresses. In the
+    event that a tuple/list is passed, cachelib tries to import the best
+    available memcache library.
+
+    This cache looks into the following packages/modules to find bindings for
+    memcached:
+
+        - ``pylibmc``
+        - ``google.appengine.api.memcache``
+        - ``memcache``
+        - ``libmc``
+
+    Implementation notes:  This cache backend works around some limitations in
+    memcached to simplify the interface.  For example unicode keys are encoded
+    to utf-8 on the fly.  Methods such as :meth:`~BaseCache.get_dict` return
+    the keys in the same format as passed.  Furthermore all get methods
+    silently ignore key errors to not cause problems when untrusted user data
+    is passed to the get methods, which is often the case in web applications.
+
+    :param servers: a list or tuple of server addresses or alternatively
+                    a :class:`memcache.Client` or a compatible client.
+    :param default_timeout: the default timeout that is used if no timeout is
+                            specified on :meth:`~BaseCache.set`. A timeout of
+                            0 indicates that the cache never expires.
+    :param key_prefix: a prefix that is added before all keys.  This makes it
+                       possible to use the same memcached server for different
+                       applications.  Keep in mind that
+                       :meth:`~BaseCache.clear` will also clear keys with a
+                       different prefix.
+    """
+
+    def __init__(
+        self,
+        servers: _t.Any = None,
+        default_timeout: int = 300,
+        key_prefix: _t.Optional[str] = None,
+    ):
+        BaseCache.__init__(self, default_timeout)
+        if servers is None or isinstance(servers, (list, tuple)):
+            if servers is None:
+                servers = ["127.0.0.1:11211"]
+            self._client = self.import_preferred_memcache_lib(servers)
+            if self._client is None:
+                raise RuntimeError("no memcache module found")
+        else:
+            # NOTE: servers is actually an already initialized memcache
+            # client.
+            self._client = servers
+
+        self.key_prefix = key_prefix
+
+    def _normalize_key(self, key: str) -> str:
+        if self.key_prefix:
+            key = self.key_prefix + key
+        return key
+
+    def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
+        timeout = BaseCache._normalize_timeout(self, timeout)
+        if timeout > 0:
+            timeout = int(time()) + timeout
+        return timeout
+
+    def get(self, key: str) -> _t.Any:
+        key = self._normalize_key(key)
+        # memcached doesn't support keys longer than 250 characters. Lookups
+        # with over-long keys often originate from untrusted user-submitted
+        # data, so we fail silently on get instead of raising.
+        if _test_memcached_key(key):
+            return self._client.get(key)
+
+    def get_dict(self, *keys: str) -> _t.Dict[str, _t.Any]:
+        key_mapping = {}
+        for key in keys:
+            encoded_key = self._normalize_key(key)
+            if _test_memcached_key(key):
+                key_mapping[encoded_key] = key
+        _keys = list(key_mapping)
+        d = rv = self._client.get_multi(_keys)  # type: _t.Dict[str, _t.Any]
+        if self.key_prefix:
+            rv = {}
+            for key, value in d.items():
+                rv[key_mapping[key]] = value
+        if len(rv) < len(keys):
+            for key in keys:
+                if key not in rv:
+                    rv[key] = None
+        return rv
+
+    def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
+        key = self._normalize_key(key)
+        timeout = self._normalize_timeout(timeout)
+        return bool(self._client.add(key, value, timeout))
+
+    def set(
+        self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
+    ) -> _t.Optional[bool]:
+        key = self._normalize_key(key)
+        timeout = self._normalize_timeout(timeout)
+        return bool(self._client.set(key, value, timeout))
+
+    def get_many(self, *keys: str) -> _t.List[_t.Any]:
+        d = self.get_dict(*keys)
+        return [d[key] for key in keys]
+
+    def set_many(
+        self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None
+    ) -> _t.List[_t.Any]:
+        new_mapping = {}
+        for key, value in mapping.items():
+            key = self._normalize_key(key)
+            new_mapping[key] = value
+
+        timeout = self._normalize_timeout(timeout)
+        failed_keys = self._client.set_multi(
+            new_mapping, timeout
+        )  # type: _t.List[_t.Any]
+        k_normkey = zip(mapping.keys(), new_mapping.keys())
+        return [k for k, nkey in k_normkey if nkey not in failed_keys]
+
+    def delete(self, key: str) -> bool:
+        key = self._normalize_key(key)
+        if _test_memcached_key(key):
+            return bool(self._client.delete(key))
+        return False
+
+    def delete_many(self, *keys: str) -> _t.List[_t.Any]:
+        new_keys = []
+        for key in keys:
+            key = self._normalize_key(key)
+            if _test_memcached_key(key):
+                new_keys.append(key)
+        self._client.delete_multi(new_keys)
+        return [k for k in new_keys if not self.has(k)]
+
+    def has(self, key: str) -> bool:
+        key = self._normalize_key(key)
+        if _test_memcached_key(key):
+            return bool(self._client.append(key, ""))
+        return False
+
+    def clear(self) -> bool:
+        return bool(self._client.flush_all())
+
+    def inc(self, key: str, delta: int = 1) -> _t.Optional[int]:
+        key = self._normalize_key(key)
+        value = (self._client.get(key) or 0) + delta
+        return value if self.set(key, value) else None
+
+    def dec(self, key: str, delta: int = 1) -> _t.Optional[int]:
+        key = self._normalize_key(key)
+        value = (self._client.get(key) or 0) - delta
+        return value if self.set(key, value) else None
+
+    def import_preferred_memcache_lib(self, servers: _t.Any) -> _t.Any:
+        """Returns an initialized memcache client.  Used by the constructor."""
+        try:
+            import pylibmc  # type: ignore
+        except ImportError:
+            pass
+        else:
+            return pylibmc.Client(servers)
+
+        try:
+            from google.appengine.api import memcache  # type: ignore
+        except ImportError:
+            pass
+        else:
+            return memcache.Client()
+
+        try:
+            import memcache  # type: ignore
+        except ImportError:
+            pass
+        else:
+            return memcache.Client(servers)
+
+        try:
+            import libmc  # type: ignore
+        except ImportError:
+            pass
+        else:
+            return libmc.Client(servers)
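
A hedged usage sketch: this assumes a memcached server listening on 127.0.0.1:11211 and at least one of the client libraries above (e.g. pylibmc) installed, otherwise the constructor raises RuntimeError:

from cachelib import MemcachedCache

cache = MemcachedCache(servers=["127.0.0.1:11211"], key_prefix="app1:")
cache.set("token", "abc123", timeout=30)    # stored under the key "app1:token"
print(cache.get("token"))                   # -> "abc123" while the key is live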

+ 0 - 0
Backend/venv/Lib/site-packages/cachelib/py.typed


+ 160 - 0
Backend/venv/Lib/site-packages/cachelib/redis.py

@@ -0,0 +1,160 @@
+import typing as _t
+import warnings
+
+from cachelib.base import BaseCache
+from cachelib.serializers import RedisSerializer
+
+
+class RedisCache(BaseCache):
+    """Uses the Redis key-value store as a cache backend.
+
+    The first argument can be either a string denoting address of the Redis
+    server or an object resembling an instance of a redis.Redis class.
+
+    Note: Python Redis API already takes care of encoding unicode strings on
+    the fly.
+
+    :param host: address of the Redis server or an object whose API is
+                 compatible with the official Python Redis client (redis-py).
+    :param port: port number on which Redis server listens for connections.
+    :param password: password authentication for the Redis server.
+    :param db: db (zero-based numeric index) on Redis Server to connect.
+    :param default_timeout: the default timeout that is used if no timeout is
+                            specified on :meth:`~BaseCache.set`. A timeout of
+                            0 indicates that the cache never expires.
+    :param key_prefix: A prefix that should be added to all keys.
+
+    Any additional keyword arguments will be passed to ``redis.Redis``.
+    """
+
+    serializer = RedisSerializer()
+
+    def __init__(
+        self,
+        host: _t.Any = "localhost",
+        port: int = 6379,
+        password: _t.Optional[str] = None,
+        db: int = 0,
+        default_timeout: int = 300,
+        key_prefix: _t.Optional[str] = None,
+        **kwargs: _t.Any
+    ):
+        BaseCache.__init__(self, default_timeout)
+        if host is None:
+            raise ValueError("RedisCache host parameter may not be None")
+        if isinstance(host, str):
+            try:
+                import redis
+            except ImportError as err:
+                raise RuntimeError("no redis module found") from err
+            if kwargs.get("decode_responses", None):
+                raise ValueError("decode_responses is not supported by RedisCache.")
+            self._client = redis.Redis(
+                host=host, port=port, password=password, db=db, **kwargs
+            )
+        else:
+            self._client = host
+        self.key_prefix = key_prefix or ""
+
+    def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
+        timeout = BaseCache._normalize_timeout(self, timeout)
+        if timeout == 0:
+            timeout = -1
+        return timeout
+
+    def dump_object(self, value: _t.Any) -> bytes:
+        warnings.warn(
+            "'dump_object' is deprecated and will be removed in the future."
+            "This is a proxy call to 'RedisCache.serializer.dumps'",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.serializer.dumps(value)
+
+    def load_object(self, value: _t.Any) -> _t.Any:
+        warnings.warn(
+            "'load_object' is deprecated and will be removed in the future."
+            "This is a proxy call to 'RedisCache.serializer.loads'",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.serializer.loads(value)
+
+    def get(self, key: str) -> _t.Any:
+        return self.load_object(self._client.get(self.key_prefix + key))
+
+    def get_many(self, *keys: str) -> _t.List[_t.Any]:
+        if self.key_prefix:
+            prefixed_keys = [self.key_prefix + key for key in keys]
+        else:
+            prefixed_keys = [k for k in keys]
+        return [self.load_object(x) for x in self._client.mget(prefixed_keys)]
+
+    def set(
+        self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
+    ) -> _t.Optional[bool]:
+        timeout = self._normalize_timeout(timeout)
+        dump = self.dump_object(value)
+        if timeout == -1:
+            result = self._client.set(name=self.key_prefix + key, value=dump)
+        else:
+            result = self._client.setex(
+                name=self.key_prefix + key, value=dump, time=timeout
+            )
+        return result
+
+    def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
+        timeout = self._normalize_timeout(timeout)
+        dump = self.dump_object(value)
+        return self._client.setnx(
+            name=self.key_prefix + key, value=dump
+        ) and self._client.expire(name=self.key_prefix + key, time=timeout)
+
+    def set_many(
+        self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None
+    ) -> _t.List[_t.Any]:
+        timeout = self._normalize_timeout(timeout)
+        # Use transaction=False to batch without calling redis MULTI
+        # which is not supported by twemproxy
+        pipe = self._client.pipeline(transaction=False)
+
+        for key, value in mapping.items():
+            dump = self.dump_object(value)
+            if timeout == -1:
+                pipe.set(name=self.key_prefix + key, value=dump)
+            else:
+                pipe.setex(name=self.key_prefix + key, value=dump, time=timeout)
+        results = pipe.execute()
+        return [k for k, was_set in zip(mapping.keys(), results) if was_set]
+
+    def delete(self, key: str) -> bool:
+        return bool(self._client.delete(self.key_prefix + key))
+
+    def delete_many(self, *keys: str) -> _t.List[_t.Any]:
+        if not keys:
+            return []
+        if self.key_prefix:
+            prefixed_keys = [self.key_prefix + key for key in keys]
+        else:
+            prefixed_keys = [k for k in keys]
+        self._client.delete(*prefixed_keys)
+        return [k for k in prefixed_keys if not self.has(k)]
+
+    def has(self, key: str) -> bool:
+        return bool(self._client.exists(self.key_prefix + key))
+
+    def clear(self) -> bool:
+        status = 0
+        if self.key_prefix:
+            keys = self._client.keys(self.key_prefix + "*")
+            if keys:
+                status = self._client.delete(*keys)
+        else:
+            status = self._client.flushdb()
+        return bool(status)
+
+    def inc(self, key: str, delta: int = 1) -> _t.Optional[int]:
+        return self._client.incr(name=self.key_prefix + key, amount=delta)
+
+    def dec(self, key: str, delta: int = 1) -> _t.Optional[int]:
+        return self._client.incr(name=self.key_prefix + key, amount=-delta)
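
A hedged usage sketch: this assumes a Redis server on localhost:6379 and the redis-py package installed; key names are illustrative:

from cachelib import RedisCache

cache = RedisCache(host="localhost", port=6379, db=0, key_prefix="web:")
cache.set("user:1", {"name": "ash"}, timeout=120)  # pickled behind the prefix
print(cache.get("user:1"))                         # -> {'name': 'ash'}
print(cache.inc("hits"))                           # Redis INCR, atomic; starts at 1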

+ 105 - 0
Backend/venv/Lib/site-packages/cachelib/serializers.py

@@ -0,0 +1,105 @@
+import logging
+import pickle
+import typing as _t
+
+
+class BaseSerializer:
+    """This is the base interface for all default serializers.
+
+    BaseSerializer.load and BaseSerializer.dump will
+    default to pickle.load and pickle.dump. This is currently
+    used only by FileSystemCache which dumps/loads to/from a file stream.
+    """
+
+    def _warn(self, e: pickle.PickleError) -> None:
+        logging.warning(
+            f"An exception has been raised during a pickling operation: {e}"
+        )
+
+    def dump(
+        self, value: int, f: _t.IO, protocol: int = pickle.HIGHEST_PROTOCOL
+    ) -> None:
+        try:
+            pickle.dump(value, f, protocol)
+        except (pickle.PickleError, pickle.PicklingError) as e:
+            self._warn(e)
+
+    def load(self, f: _t.BinaryIO) -> _t.Any:
+        try:
+            data = pickle.load(f)
+        except pickle.PickleError as e:
+            self._warn(e)
+            return None
+        else:
+            return data
+
+    """BaseSerializer.loads and BaseSerializer.dumps
+    work on top of pickle.loads and pickle.dumps. Dumping/loading
+    strings and byte strings is the default for most cache types.
+    """
+
+    def dumps(self, value: _t.Any, protocol: int = pickle.HIGHEST_PROTOCOL) -> bytes:
+        try:
+            serialized = pickle.dumps(value, protocol)
+        except (pickle.PickleError, pickle.PicklingError) as e:
+            self._warn(e)
+        return serialized
+
+    def loads(self, bvalue: bytes) -> _t.Any:
+        try:
+            data = pickle.loads(bvalue)
+        except pickle.PickleError as e:
+            self._warn(e)
+            return None
+        else:
+            return data
+
+
+"""Default serializers for each cache type.
+
+The following classes can be used to further customize
+serialiation behaviour. Alternatively, any serializer can be
+overriden in order to use a custom serializer with a different
+strategy altogether.
+"""
+
+
+class UWSGISerializer(BaseSerializer):
+    """Default serializer for UWSGICache."""
+
+
+class SimpleSerializer(BaseSerializer):
+    """Default serializer for SimpleCache."""
+
+
+class FileSystemSerializer(BaseSerializer):
+    """Default serializer for FileSystemCache."""
+
+
+class RedisSerializer(BaseSerializer):
+    """Default serializer for RedisCache."""
+
+    def dumps(self, value: _t.Any, protocol: int = pickle.HIGHEST_PROTOCOL) -> bytes:
+        """Dumps an object into a string for redis. By default it serializes
+        integers as regular string and pickle dumps everything else.
+        """
+        if isinstance(type(value), int):
+            return str(value).encode("ascii")
+        return b"!" + pickle.dumps(value, protocol)
+
+    def loads(self, value: _t.Optional[bytes]) -> _t.Any:
+        """The reversal of :meth:`dump_object`. This might be called with
+        None.
+        """
+        if value is None:
+            return None
+        if value.startswith(b"!"):
+            try:
+                return pickle.loads(value[1:])
+            except pickle.PickleError:
+                return None
+        try:
+            return int(value)
+        except ValueError:
+            # before 0.8 we did not have serialization. Still support that.
+            return value
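
Since SimpleCache only ever calls dumps()/loads() on its serializer, swapping in a custom strategy is a matter of assigning a compatible object. A sketch replacing pickle with JSON (JsonSerializer is hypothetical, not part of cachelib):

import json
import typing as _t

from cachelib import SimpleCache

class JsonSerializer:
    """Drop-in alternative: JSON instead of pickle (safer, less general)."""

    def dumps(self, value: _t.Any) -> bytes:
        return json.dumps(value).encode("utf-8")

    def loads(self, bvalue: bytes) -> _t.Any:
        return json.loads(bvalue.decode("utf-8"))

cache = SimpleCache()
cache.serializer = JsonSerializer()         # SimpleCache only uses dumps/loads
cache.set("point", {"x": 1, "y": 2})
assert cache.get("point") == {"x": 1, "y": 2}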

+ 104 - 0
Backend/venv/Lib/site-packages/cachelib/simple.py

@@ -0,0 +1,104 @@
+import typing as _t
+from time import time
+
+from cachelib.base import BaseCache
+from cachelib.serializers import SimpleSerializer
+
+
+class SimpleCache(BaseCache):
+
+    """Simple memory cache for single process environments.  This class exists
+    mainly for the development server and is not 100% thread safe.  It tries
+    to use as many atomic operations as possible, and no locks for simplicity,
+    so under heavy load keys may occasionally be added multiple times.
+
+    :param threshold: the maximum number of items the cache stores before
+                      it starts deleting some.
+    :param default_timeout: the default timeout that is used if no timeout is
+                            specified on :meth:`~BaseCache.set`. A timeout of
+                            0 indicates that the cache never expires.
+    """
+
+    serializer = SimpleSerializer()
+
+    def __init__(
+        self,
+        threshold: int = 500,
+        default_timeout: int = 300,
+    ):
+        BaseCache.__init__(self, default_timeout)
+        self._cache: _t.Dict[str, _t.Any] = {}
+        self._threshold = threshold or 500  # fall back to 500 when threshold is 0
+
+    def _over_threshold(self) -> bool:
+        return len(self._cache) > self._threshold
+
+    def _remove_expired(self, now: float) -> None:
+        # expires == 0 means "never expires" (see _normalize_timeout)
+        toremove = [
+            k
+            for k, (expires, _) in self._cache.items()
+            if expires != 0 and expires < now
+        ]
+        for k in toremove:
+            self._cache.pop(k, None)
+
+    def _remove_older(self) -> None:
+        k_ordered = (
+            k
+            for k, v in sorted(
+                self._cache.items(), key=lambda item: item[1][0]  # type: ignore
+            )
+        )
+        for k in k_ordered:
+            self._cache.pop(k, None)
+            if not self._over_threshold():
+                break
+
+    def _prune(self) -> None:
+        if self._over_threshold():
+            now = time()
+            self._remove_expired(now)
+        # remove older items if still over threshold
+        if self._over_threshold():
+            self._remove_older()
+
+    def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
+        timeout = BaseCache._normalize_timeout(self, timeout)
+        if timeout > 0:
+            timeout = int(time()) + timeout
+        return timeout
+
+    def get(self, key: str) -> _t.Any:
+        try:
+            expires, value = self._cache[key]
+            if expires == 0 or expires > time():
+                return self.serializer.loads(value)
+        except KeyError:
+            return None
+
+    def set(
+        self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
+    ) -> _t.Optional[bool]:
+        expires = self._normalize_timeout(timeout)
+        self._prune()
+        self._cache[key] = (expires, self.serializer.dumps(value))
+        return True
+
+    def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
+        expires = self._normalize_timeout(timeout)
+        self._prune()
+        item = (expires, self.serializer.dumps(value))
+        if key in self._cache:
+            return False
+        self._cache.setdefault(key, item)
+        return True
+
+    def delete(self, key: str) -> bool:
+        return self._cache.pop(key, None) is not None
+
+    def has(self, key: str) -> bool:
+        try:
+            expires, value = self._cache[key]
+            return bool(expires == 0 or expires > time())
+        except KeyError:
+            return False
+
+    def clear(self) -> bool:
+        self._cache.clear()
+        return not bool(self._cache)
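
The timeout semantics in a short, self-contained example (note that a timeout of 0 means the entry never expires):

import time

from cachelib import SimpleCache

cache = SimpleCache()
cache.set("flash", "gone soon", timeout=2)  # expires roughly 2 seconds from now
cache.set("pinned", "stays", timeout=0)     # 0 = never expires
time.sleep(2.1)
assert cache.get("flash") is None           # expired entries read back as None
assert cache.get("pinned") == "stays"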

+ 83 - 0
Backend/venv/Lib/site-packages/cachelib/uwsgi.py

@@ -0,0 +1,83 @@
+import platform
+import typing as _t
+
+from cachelib.base import BaseCache
+from cachelib.serializers import UWSGISerializer
+
+
+class UWSGICache(BaseCache):
+    """Implements the cache using uWSGI's caching framework.
+
+    .. note::
+        This class cannot be used when running under PyPy, because the uWSGI
+        API implementation for PyPy is lacking the needed functionality.
+
+    :param default_timeout: The default timeout in seconds.
+    :param cache: The name of the caching instance to connect to, for
+        example: mycache@localhost:3031, defaults to an empty string, which
+        means uWSGI will cache in the local instance. If the cache is in the
+        same instance as the werkzeug app, you only have to provide the name of
+        the cache.
+    """
+
+    serializer = UWSGISerializer()
+
+    def __init__(
+        self,
+        default_timeout: int = 300,
+        cache: str = "",
+    ):
+        BaseCache.__init__(self, default_timeout)
+
+        if platform.python_implementation() == "PyPy":
+            raise RuntimeError(
+                "uWSGI caching does not work under PyPy, see "
+                "the docs for more details."
+            )
+
+        try:
+            import uwsgi  # type: ignore
+
+            self._uwsgi = uwsgi
+        except ImportError as err:
+            raise RuntimeError(
+                "uWSGI could not be imported, are you running under uWSGI?"
+            ) from err
+
+        self.cache = cache
+
+    def get(self, key: str) -> _t.Any:
+        rv = self._uwsgi.cache_get(key, self.cache)
+        if rv is None:
+            return
+        return self.serializer.loads(rv)
+
+    def delete(self, key: str) -> bool:
+        return bool(self._uwsgi.cache_del(key, self.cache))
+
+    def set(
+        self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
+    ) -> _t.Optional[bool]:
+        result = self._uwsgi.cache_update(
+            key,
+            self.serializer.dumps(value),
+            self._normalize_timeout(timeout),
+            self.cache,
+        )  # type: bool
+        return result
+
+    def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
+        return bool(
+            self._uwsgi.cache_set(
+                key,
+                self.serializer.dumps(value),
+                self._normalize_timeout(timeout),
+                self.cache,
+            )
+        )
+
+    def clear(self) -> bool:
+        return bool(self._uwsgi.cache_clear(self.cache))
+
+    def has(self, key: str) -> bool:
+        return self._uwsgi.cache_exists(key, self.cache) is not None

+ 109 - 0
Backend/venv/Lib/site-packages/flask_session/__init__.py

@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+"""
+    flask_session
+    ~~~~~~~~~~~~~
+
+    Adds server session support to your application.
+
+    :copyright: (c) 2014 by Shipeng Feng.
+    :license: BSD, see LICENSE for more details.
+"""
+
+__version__ = '0.4.0'
+
+import os
+
+from .sessions import NullSessionInterface, RedisSessionInterface, \
+    MemcachedSessionInterface, FileSystemSessionInterface, \
+    MongoDBSessionInterface, SqlAlchemySessionInterface
+
+
+class Session(object):
+    """This class is used to add Server-side Session to one or more Flask
+    applications.
+
+    There are two usage modes.  One is to initialize the instance with a
+    very specific Flask application::
+
+        app = Flask(__name__)
+        Session(app)
+
+    The second possibility is to create the object once and configure the
+    application later::
+
+        sess = Session()
+
+        def create_app():
+            app = Flask(__name__)
+            sess.init_app(app)
+            return app
+
+    By default Flask-Session will use :class:`NullSessionInterface`; you
+    really should configure your app to use a different SessionInterface.
+
+    .. note::
+
+        You cannot use the ``Session`` instance directly; all ``Session`` does
+        is change the :attr:`~flask.Flask.session_interface` attribute on
+        your Flask applications.
+    """
+
+    def __init__(self, app=None):
+        self.app = app
+        if app is not None:
+            self.init_app(app)
+
+    def init_app(self, app):
+        """This is used to set up session for your app object.
+
+        :param app: the Flask app object with proper configuration.
+        """
+        app.session_interface = self._get_interface(app)
+
+    def _get_interface(self, app):
+        config = app.config.copy()
+        config.setdefault('SESSION_TYPE', 'null')
+        config.setdefault('SESSION_PERMANENT', True)
+        config.setdefault('SESSION_USE_SIGNER', False)
+        config.setdefault('SESSION_KEY_PREFIX', 'session:')
+        config.setdefault('SESSION_REDIS', None)
+        config.setdefault('SESSION_MEMCACHED', None)
+        config.setdefault('SESSION_FILE_DIR',
+                          os.path.join(os.getcwd(), 'flask_session'))
+        config.setdefault('SESSION_FILE_THRESHOLD', 500)
+        config.setdefault('SESSION_FILE_MODE', 384)
+        config.setdefault('SESSION_MONGODB', None)
+        config.setdefault('SESSION_MONGODB_DB', 'flask_session')
+        config.setdefault('SESSION_MONGODB_COLLECT', 'sessions')
+        config.setdefault('SESSION_SQLALCHEMY', None)
+        config.setdefault('SESSION_SQLALCHEMY_TABLE', 'sessions')
+
+        if config['SESSION_TYPE'] == 'redis':
+            session_interface = RedisSessionInterface(
+                config['SESSION_REDIS'], config['SESSION_KEY_PREFIX'],
+                config['SESSION_USE_SIGNER'], config['SESSION_PERMANENT'])
+        elif config['SESSION_TYPE'] == 'memcached':
+            session_interface = MemcachedSessionInterface(
+                config['SESSION_MEMCACHED'], config['SESSION_KEY_PREFIX'],
+                config['SESSION_USE_SIGNER'], config['SESSION_PERMANENT'])
+        elif config['SESSION_TYPE'] == 'filesystem':
+            session_interface = FileSystemSessionInterface(
+                config['SESSION_FILE_DIR'], config['SESSION_FILE_THRESHOLD'],
+                config['SESSION_FILE_MODE'], config['SESSION_KEY_PREFIX'],
+                config['SESSION_USE_SIGNER'], config['SESSION_PERMANENT'])
+        elif config['SESSION_TYPE'] == 'mongodb':
+            session_interface = MongoDBSessionInterface(
+                config['SESSION_MONGODB'], config['SESSION_MONGODB_DB'],
+                config['SESSION_MONGODB_COLLECT'],
+                config['SESSION_KEY_PREFIX'], config['SESSION_USE_SIGNER'],
+                config['SESSION_PERMANENT'])
+        elif config['SESSION_TYPE'] == 'sqlalchemy':
+            session_interface = SqlAlchemySessionInterface(
+                app, config['SESSION_SQLALCHEMY'],
+                config['SESSION_SQLALCHEMY_TABLE'],
+                config['SESSION_KEY_PREFIX'], config['SESSION_USE_SIGNER'],
+                config['SESSION_PERMANENT'])
+        else:
+            session_interface = NullSessionInterface()
+
+        return session_interface
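
A minimal wiring example for the filesystem backend, matching the config keys above; the directory path is illustrative:

from flask import Flask, session
from flask_session import Session

app = Flask(__name__)
app.config["SESSION_TYPE"] = "filesystem"             # server-side sessions on disk
app.config["SESSION_FILE_DIR"] = "/tmp/demo_session"  # illustrative path
Session(app)

@app.route("/visit")
def visit():
    # the session dict is now persisted server-side, keyed by the cookie sid
    session["count"] = session.get("count", 0) + 1
    return f"visit number {session['count']}"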

+ 586 - 0
Backend/venv/Lib/site-packages/flask_session/sessions.py

@@ -0,0 +1,586 @@
+# -*- coding: utf-8 -*-
+"""
+    flask_session.sessions
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Server-side Sessions and SessionInterfaces.
+
+    :copyright: (c) 2014 by Shipeng Feng.
+    :license: BSD, see LICENSE for more details.
+"""
+import sys
+import time
+from datetime import datetime
+from uuid import uuid4
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+from flask.sessions import SessionInterface as FlaskSessionInterface
+from flask.sessions import SessionMixin
+from werkzeug.datastructures import CallbackDict
+from itsdangerous import Signer, BadSignature, want_bytes
+
+
+PY2 = sys.version_info[0] == 2
+if not PY2:
+    text_type = str
+else:
+    text_type = unicode
+
+
+def total_seconds(td):
+    return td.days * 60 * 60 * 24 + td.seconds
+
+
+class ServerSideSession(CallbackDict, SessionMixin):
+    """Baseclass for server-side based sessions."""
+
+    def __init__(self, initial=None, sid=None, permanent=None):
+        def on_update(self):
+            self.modified = True
+        CallbackDict.__init__(self, initial, on_update)
+        self.sid = sid
+        if permanent:
+            self.permanent = permanent
+        self.modified = False
+
+
+class RedisSession(ServerSideSession):
+    pass
+
+
+class MemcachedSession(ServerSideSession):
+    pass
+
+
+class FileSystemSession(ServerSideSession):
+    pass
+
+
+class MongoDBSession(ServerSideSession):
+    pass
+
+
+class SqlAlchemySession(ServerSideSession):
+    pass
+
+
+class SessionInterface(FlaskSessionInterface):
+
+    def _generate_sid(self):
+        return str(uuid4())
+
+    def _get_signer(self, app):
+        if not app.secret_key:
+            return None
+        return Signer(app.secret_key, salt='flask-session',
+                      key_derivation='hmac')
+
+
+class NullSessionInterface(SessionInterface):
+    """Used to open a :class:`flask.sessions.NullSession` instance.
+    """
+
+    def open_session(self, app, request):
+        return None
+
+
+class RedisSessionInterface(SessionInterface):
+    """Uses the Redis key-value store as a session backend.
+
+    .. versionadded:: 0.2
+        The `use_signer` parameter was added.
+
+    :param redis: A ``redis.Redis`` instance.
+    :param key_prefix: A prefix that is added to all Redis store keys.
+    :param use_signer: Whether to sign the session id cookie or not.
+    :param permanent: Whether to use permanent session or not.
+    """
+
+    serializer = pickle
+    session_class = RedisSession
+
+    def __init__(self, redis, key_prefix, use_signer=False, permanent=True):
+        if redis is None:
+            from redis import Redis
+            redis = Redis()
+        self.redis = redis
+        self.key_prefix = key_prefix
+        self.use_signer = use_signer
+        self.permanent = permanent
+        self.has_same_site_capability = hasattr(self, "get_cookie_samesite")
+
+    def open_session(self, app, request):
+        sid = request.cookies.get(app.session_cookie_name)
+        if not sid:
+            sid = self._generate_sid()
+            return self.session_class(sid=sid, permanent=self.permanent)
+        if self.use_signer:
+            signer = self._get_signer(app)
+            if signer is None:
+                return None
+            try:
+                sid_as_bytes = signer.unsign(sid)
+                sid = sid_as_bytes.decode()
+            except BadSignature:
+                sid = self._generate_sid()
+                return self.session_class(sid=sid, permanent=self.permanent)
+
+        if not PY2 and not isinstance(sid, text_type):
+            sid = sid.decode('utf-8', 'strict')
+        val = self.redis.get(self.key_prefix + sid)
+        if val is not None:
+            try:
+                data = self.serializer.loads(val)
+                return self.session_class(data, sid=sid)
+            except:
+                return self.session_class(sid=sid, permanent=self.permanent)
+        return self.session_class(sid=sid, permanent=self.permanent)
+
+    def save_session(self, app, session, response):
+        domain = self.get_cookie_domain(app)
+        path = self.get_cookie_path(app)
+        if not session:
+            if session.modified:
+                self.redis.delete(self.key_prefix + session.sid)
+                response.delete_cookie(app.session_cookie_name,
+                                       domain=domain, path=path)
+            return
+
+        # Modification case.  There are upsides and downsides to
+        # emitting a set-cookie header each request.  The behavior
+        # is controlled by the :meth:`should_set_cookie` method
+        # which performs a quick check to figure out if the cookie
+        # should be set or not.  This is controlled by the
+        # SESSION_REFRESH_EACH_REQUEST config flag as well as
+        # the permanent flag on the session itself.
+        # if not self.should_set_cookie(app, session):
+        #    return
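+        # "samesite" is forwarded only when this Flask/Werkzeug version
+        # exposes get_cookie_samesite (checked once in __init__).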
+        conditional_cookie_kwargs = {}
+        httponly = self.get_cookie_httponly(app)
+        secure = self.get_cookie_secure(app)
+        if self.has_same_site_capability:
+            conditional_cookie_kwargs["samesite"] = self.get_cookie_samesite(app)
+        expires = self.get_expiration_time(app, session)
+        val = self.serializer.dumps(dict(session))
+        self.redis.setex(name=self.key_prefix + session.sid, value=val,
+                         time=total_seconds(app.permanent_session_lifetime))
+        if self.use_signer:
+            session_id = self._get_signer(app).sign(want_bytes(session.sid))
+        else:
+            session_id = session.sid
+        response.set_cookie(app.session_cookie_name, session_id,
+                            expires=expires, httponly=httponly,
+                            domain=domain, path=path, secure=secure,
+                            **conditional_cookie_kwargs)
+
+
+class MemcachedSessionInterface(SessionInterface):
+    """A Session interface that uses memcached as backend.
+
+    .. versionadded:: 0.2
+        The `use_signer` parameter was added.
+
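+    A minimal wiring sketch (illustrative: assumes python-memcached and a
+    local memcached instance; normally built from the app config)::
+
+        import memcache
+        client = memcache.Client(['127.0.0.1:11211'])
+        app.session_interface = MemcachedSessionInterface(
+            client, key_prefix='session:')
+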
+    :param client: A ``memcache.Client`` instance.
+    :param key_prefix: A prefix that is added to all Memcached store keys.
+    :param use_signer: Whether to sign the session id cookie or not.
+    :param permanent: Whether to use permanent session or not.
+    """
+
+    serializer = pickle
+    session_class = MemcachedSession
+
+    def __init__(self, client, key_prefix, use_signer=False, permanent=True):
+        if client is None:
+            client = self._get_preferred_memcache_client()
+            if client is None:
+                raise RuntimeError('no memcache module found')
+        self.client = client
+        self.key_prefix = key_prefix
+        self.use_signer = use_signer
+        self.permanent = permanent
+        self.has_same_site_capability = hasattr(self, "get_cookie_samesite")
+
+    def _get_preferred_memcache_client(self):
+        servers = ['127.0.0.1:11211']
+        try:
+            import pylibmc
+        except ImportError:
+            pass
+        else:
+            return pylibmc.Client(servers)
+
+        try:
+            import memcache
+        except ImportError:
+            pass
+        else:
+            return memcache.Client(servers)
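+        # Implicit fall-through: None is returned when neither pylibmc nor
+        # python-memcached is importable; __init__ raises RuntimeError then.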
+
+    def _get_memcache_timeout(self, timeout):
+        """
+        Memcached deals with long (> 30 days) timeouts in a special
+        way. Call this function to obtain a safe value for your timeout.
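+
+        For example (illustrative numbers): a 60-day timeout is 5184000
+        seconds, which exceeds the 30-day limit, so it is rewritten as an
+        absolute Unix timestamp, roughly ``int(time.time()) + 5184000``.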
+        """
+        if timeout > 2592000:  # 60*60*24*30, 30 days
+            # See http://code.google.com/p/memcached/wiki/FAQ
+            # "You can set expire times up to 30 days in the future. After that
+            # memcached interprets it as a date, and will expire the item after
+            # said date. This is a simple (but obscure) mechanic."
+            #
+            # This means that we have to switch to absolute timestamps.
+            timeout += int(time.time())
+        return timeout
+
+    def open_session(self, app, request):
+        sid = request.cookies.get(app.session_cookie_name)
+        if not sid:
+            sid = self._generate_sid()
+            return self.session_class(sid=sid, permanent=self.permanent)
+        if self.use_signer:
+            signer = self._get_signer(app)
+            if signer is None:
+                return None
+            try:
+                sid_as_bytes = signer.unsign(sid)
+                sid = sid_as_bytes.decode()
+            except BadSignature:
+                sid = self._generate_sid()
+                return self.session_class(sid=sid, permanent=self.permanent)
+
+        full_session_key = self.key_prefix + sid
+        if PY2 and isinstance(full_session_key, unicode):
+            full_session_key = full_session_key.encode('utf-8')
+        val = self.client.get(full_session_key)
+        if val is not None:
+            try:
+                if not PY2:
+                    val = want_bytes(val)
+                data = self.serializer.loads(val)
+                return self.session_class(data, sid=sid)
+            except Exception:
+                return self.session_class(sid=sid, permanent=self.permanent)
+        return self.session_class(sid=sid, permanent=self.permanent)
+
+    def save_session(self, app, session, response):
+        domain = self.get_cookie_domain(app)
+        path = self.get_cookie_path(app)
+        full_session_key = self.key_prefix + session.sid
+        if PY2 and isinstance(full_session_key, unicode):
+            full_session_key = full_session_key.encode('utf-8')
+        if not session:
+            if session.modified:
+                self.client.delete(full_session_key)
+                response.delete_cookie(app.session_cookie_name,
+                                       domain=domain, path=path)
+            return
+
+        conditional_cookie_kwargs = {}
+        httponly = self.get_cookie_httponly(app)
+        secure = self.get_cookie_secure(app)
+        if self.has_same_site_capability:
+            conditional_cookie_kwargs["samesite"] = self.get_cookie_samesite(app)
+        expires = self.get_expiration_time(app, session)
+        if not PY2:
+            val = self.serializer.dumps(dict(session), 0)
+        else:
+            val = self.serializer.dumps(dict(session))
+        self.client.set(full_session_key, val, self._get_memcache_timeout(
+                        total_seconds(app.permanent_session_lifetime)))
+        if self.use_signer:
+            session_id = self._get_signer(app).sign(want_bytes(session.sid))
+        else:
+            session_id = session.sid
+        response.set_cookie(app.session_cookie_name, session_id,
+                            expires=expires, httponly=httponly,
+                            domain=domain, path=path, secure=secure,
+                            **conditional_cookie_kwargs)
+
+
+class FileSystemSessionInterface(SessionInterface):
+    """Uses the :class:`cachelib.file.FileSystemCache` as a session backend.
+
+    .. versionadded:: 0.2
+        The `use_signer` parameter was added.
+
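+    A minimal wiring sketch (illustrative values; normally built from the
+    app config by :class:`flask_session.Session`)::
+
+        app.session_interface = FileSystemSessionInterface(
+            cache_dir='/tmp/flask_session', threshold=500, mode=0o600,
+            key_prefix='session:')
+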
+    :param cache_dir: the directory where session files are stored.
+    :param threshold: the maximum number of items the session stores before it
+                      starts deleting some.
+    :param mode: the file mode wanted for the session files, default 0600
+    :param key_prefix: A prefix that is added to FileSystemCache store keys.
+    :param use_signer: Whether to sign the session id cookie or not.
+    :param permanent: Whether to use permanent session or not.
+    """
+
+    session_class = FileSystemSession
+
+    def __init__(self, cache_dir, threshold, mode, key_prefix,
+                 use_signer=False, permanent=True):
+        from cachelib.file import FileSystemCache
+        self.cache = FileSystemCache(cache_dir, threshold=threshold, mode=mode)
+        self.key_prefix = key_prefix
+        self.use_signer = use_signer
+        self.permanent = permanent
+        self.has_same_site_capability = hasattr(self, "get_cookie_samesite")
+
+    def open_session(self, app, request):
+        sid = request.cookies.get(app.session_cookie_name)
+        if not sid:
+            sid = self._generate_sid()
+            return self.session_class(sid=sid, permanent=self.permanent)
+        if self.use_signer:
+            signer = self._get_signer(app)
+            if signer is None:
+                return None
+            try:
+                sid_as_bytes = signer.unsign(sid)
+                sid = sid_as_bytes.decode()
+            except BadSignature:
+                sid = self._generate_sid()
+                return self.session_class(sid=sid, permanent=self.permanent)
+
+        data = self.cache.get(self.key_prefix + sid)
+        if data is not None:
+            return self.session_class(data, sid=sid)
+        return self.session_class(sid=sid, permanent=self.permanent)
+
+    def save_session(self, app, session, response):
+        domain = self.get_cookie_domain(app)
+        path = self.get_cookie_path(app)
+        if not session:
+            if session.modified:
+                self.cache.delete(self.key_prefix + session.sid)
+                response.delete_cookie(app.session_cookie_name,
+                                       domain=domain, path=path)
+            return
+
+        conditional_cookie_kwargs = {}
+        httponly = self.get_cookie_httponly(app)
+        secure = self.get_cookie_secure(app)
+        if self.has_same_site_capability:
+            conditional_cookie_kwargs["samesite"] = self.get_cookie_samesite(app)
+        expires = self.get_expiration_time(app, session)
+        data = dict(session)
+        self.cache.set(self.key_prefix + session.sid, data,
+                       total_seconds(app.permanent_session_lifetime))
+        if self.use_signer:
+            session_id = self._get_signer(app).sign(want_bytes(session.sid))
+        else:
+            session_id = session.sid
+        response.set_cookie(app.session_cookie_name, session_id,
+                            expires=expires, httponly=httponly,
+                            domain=domain, path=path, secure=secure,
+                            **conditional_cookie_kwargs)
+
+
+class MongoDBSessionInterface(SessionInterface):
+    """A Session interface that uses mongodb as backend.
+
+    .. versionadded:: 0.2
+        The `use_signer` parameter was added.
+
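+    A minimal wiring sketch (illustrative database and collection names;
+    normally built from the app config)::
+
+        from pymongo import MongoClient
+        app.session_interface = MongoDBSessionInterface(
+            MongoClient(), db='app_db', collection='sessions',
+            key_prefix='session:')
+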
+    :param client: A ``pymongo.MongoClient`` instance.
+    :param db: The database you want to use.
+    :param collection: The collection you want to use.
+    :param key_prefix: A prefix that is added to all MongoDB store keys.
+    :param use_signer: Whether to sign the session id cookie or not.
+    :param permanent: Whether to use permanent session or not.
+    """
+
+    serializer = pickle
+    session_class = MongoDBSession
+
+    def __init__(self, client, db, collection, key_prefix, use_signer=False,
+                 permanent=True):
+        if client is None:
+            from pymongo import MongoClient
+            client = MongoClient()
+        self.client = client
+        self.store = client[db][collection]
+        self.key_prefix = key_prefix
+        self.use_signer = use_signer
+        self.permanent = permanent
+        self.has_same_site_capability = hasattr(self, "get_cookie_samesite")
+
+    def open_session(self, app, request):
+        sid = request.cookies.get(app.session_cookie_name)
+        if not sid:
+            sid = self._generate_sid()
+            return self.session_class(sid=sid, permanent=self.permanent)
+        if self.use_signer:
+            signer = self._get_signer(app)
+            if signer is None:
+                return None
+            try:
+                sid_as_bytes = signer.unsign(sid)
+                sid = sid_as_bytes.decode()
+            except BadSignature:
+                sid = self._generate_sid()
+                return self.session_class(sid=sid, permanent=self.permanent)
+
+        store_id = self.key_prefix + sid
+        document = self.store.find_one({'id': store_id})
+        # Guard the expiry check: comparing a missing/None expiration with
+        # a datetime would raise TypeError on Python 3.
+        if document and document.get('expiration') is not None \
+                and document['expiration'] <= datetime.utcnow():
+            # Delete expired session
+            self.store.remove({'id': store_id})
+            document = None
+        if document is not None:
+            try:
+                val = document['val']
+                data = self.serializer.loads(want_bytes(val))
+                return self.session_class(data, sid=sid)
+            except Exception:
+                return self.session_class(sid=sid, permanent=self.permanent)
+        return self.session_class(sid=sid, permanent=self.permanent)
+
+    def save_session(self, app, session, response):
+        domain = self.get_cookie_domain(app)
+        path = self.get_cookie_path(app)
+        store_id = self.key_prefix + session.sid
+        if not session:
+            if session.modified:
+                self.store.remove({'id': store_id})
+                response.delete_cookie(app.session_cookie_name,
+                                       domain=domain, path=path)
+            return
+
+        conditional_cookie_kwargs = {}
+        httponly = self.get_cookie_httponly(app)
+        secure = self.get_cookie_secure(app)
+        if self.has_same_site_capability:
+            conditional_cookie_kwargs["samesite"] = self.get_cookie_samesite(app)
+        expires = self.get_expiration_time(app, session)
+        val = self.serializer.dumps(dict(session))
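+        # The trailing ``True`` is pymongo's positional upsert flag: insert
+        # the document when none exists yet.  (update()/remove() were
+        # removed in pymongo 4; replace_one()/delete_one() supersede them.)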
+        self.store.update({'id': store_id},
+                          {'id': store_id,
+                           'val': val,
+                           'expiration': expires}, True)
+        if self.use_signer:
+            session_id = self._get_signer(app).sign(want_bytes(session.sid))
+        else:
+            session_id = session.sid
+        response.set_cookie(app.session_cookie_name, session_id,
+                            expires=expires, httponly=httponly,
+                            domain=domain, path=path, secure=secure,
+                            **conditional_cookie_kwargs)
+
+
+class SqlAlchemySessionInterface(SessionInterface):
+    """Uses the Flask-SQLAlchemy from a flask app as a session backend.
+
+    .. versionadded:: 0.2
+
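+    A minimal wiring sketch (illustrative table name; assumes an existing
+    Flask-SQLAlchemy ``db`` bound to ``app``)::
+
+        app.session_interface = SqlAlchemySessionInterface(
+            app, db, table='sessions', key_prefix='session:')
+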
+    :param app: A Flask app instance.
+    :param db: A Flask-SQLAlchemy instance.
+    :param table: The table name you want to use.
+    :param key_prefix: A prefix that is added to all store keys.
+    :param use_signer: Whether to sign the session id cookie or not.
+    :param permanent: Whether to use permanent session or not.
+    """
+
+    serializer = pickle
+    session_class = SqlAlchemySession
+
+    def __init__(self, app, db, table, key_prefix, use_signer=False,
+                 permanent=True):
+        if db is None:
+            from flask_sqlalchemy import SQLAlchemy
+            db = SQLAlchemy(app)
+        self.db = db
+        self.key_prefix = key_prefix
+        self.use_signer = use_signer
+        self.permanent = permanent
+        self.has_same_site_capability = hasattr(self, "get_cookie_samesite")
+
+        class Session(self.db.Model):
+            __tablename__ = table
+
+            id = self.db.Column(self.db.Integer, primary_key=True)
+            session_id = self.db.Column(self.db.String(255), unique=True)
+            data = self.db.Column(self.db.LargeBinary)
+            expiry = self.db.Column(self.db.DateTime)
+
+            def __init__(self, session_id, data, expiry):
+                self.session_id = session_id
+                self.data = data
+                self.expiry = expiry
+
+            def __repr__(self):
+                return '<Session data %s>' % self.data
+
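+        # Table creation is left to the application here; run
+        # ``db.create_all()`` once at startup if the table may not exist.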
+        # self.db.create_all()
+        self.sql_session_model = Session
+
+    def open_session(self, app, request):
+        sid = request.cookies.get(app.session_cookie_name)
+        if not sid:
+            sid = self._generate_sid()
+            return self.session_class(sid=sid, permanent=self.permanent)
+        if self.use_signer:
+            signer = self._get_signer(app)
+            if signer is None:
+                return None
+            try:
+                sid_as_bytes = signer.unsign(sid)
+                sid = sid_as_bytes.decode()
+            except BadSignature:
+                sid = self._generate_sid()
+                return self.session_class(sid=sid, permanent=self.permanent)
+
+        store_id = self.key_prefix + sid
+        saved_session = self.sql_session_model.query.filter_by(
+            session_id=store_id).first()
+        if saved_session and saved_session.expiry <= datetime.utcnow():
+            # Delete expired session
+            self.db.session.delete(saved_session)
+            self.db.session.commit()
+            saved_session = None
+        if saved_session:
+            try:
+                val = saved_session.data
+                data = self.serializer.loads(want_bytes(val))
+                return self.session_class(data, sid=sid)
+            except Exception:
+                return self.session_class(sid=sid, permanent=self.permanent)
+        return self.session_class(sid=sid, permanent=self.permanent)
+
+    def save_session(self, app, session, response):
+        domain = self.get_cookie_domain(app)
+        path = self.get_cookie_path(app)
+        store_id = self.key_prefix + session.sid
+        saved_session = self.sql_session_model.query.filter_by(
+            session_id=store_id).first()
+        if not session:
+            if session.modified:
+                if saved_session:
+                    self.db.session.delete(saved_session)
+                    self.db.session.commit()
+                response.delete_cookie(app.session_cookie_name,
+                                       domain=domain, path=path)
+            return
+
+        conditional_cookie_kwargs = {}
+        httponly = self.get_cookie_httponly(app)
+        secure = self.get_cookie_secure(app)
+        if self.has_same_site_capability:
+            conditional_cookie_kwargs["samesite"] = self.get_cookie_samesite(app)
+        expires = self.get_expiration_time(app, session)
+        val = self.serializer.dumps(dict(session))
+        if saved_session:
+            saved_session.data = val
+            saved_session.expiry = expires
+            self.db.session.commit()
+        else:
+            new_session = self.sql_session_model(store_id, val, expires)
+            self.db.session.add(new_session)
+            self.db.session.commit()
+        if self.use_signer:
+            session_id = self._get_signer(app).sign(want_bytes(session.sid))
+        else:
+            session_id = session.sid
+        response.set_cookie(app.session_cookie_name, session_id,
+                            expires=expires, httponly=httponly,
+                            domain=domain, path=path, secure=secure,
+                            **conditional_cookie_kwargs)